prompt | completion
---|---
large_string, lengths 70 – 991k | large_string, lengths 0 – 1.02k
<|file_name|>node.go<|end_file_name|><|fim▁begin|>package tree
type Node struct {
Value int<|fim▁hole|><|fim▁end|> | Left, Right *Node
} |
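Each row above is a fill-in-the-middle (FIM) training pair: the prompt holds a source file wrapped in `<|file_name|>`/`<|fim▁begin|>` markers with one span cut out at `<|fim▁hole|>`, and the completion column holds the removed span. A minimal Python sketch of how such a pair could be assembled from a file — the marker strings are copied from the rows in this dump, while the helper name and the way the hole is chosen are assumptions, not this dataset's actual pipeline:

```python
# Illustrative sketch: build one FIM prompt/completion row from a file.
# Only the marker tokens are taken from the dump; the rest is assumed.
def make_fim_row(filename, text, hole_start, hole_end):
    prompt = ("<|file_name|>" + filename + "<|end_file_name|>"
              + "<|fim▁begin|>" + text[:hole_start]
              + "<|fim▁hole|>" + text[hole_end:]
              + "<|fim▁end|>")
    completion = text[hole_start:hole_end]  # the span the model must fill in
    return prompt, completion
```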
<|file_name|>page-title.service.spec.ts<|end_file_name|><|fim▁begin|>// Copyright 2019 The Oppia Authors. All Rights Reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.<|fim▁hole|>//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS-IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
/**
* @fileoverview Unit test for the page title service.
*/
import { PageTitleService } from 'services/page-title.service';
import { TestBed } from '@angular/core/testing';
import { Title } from '@angular/platform-browser';
describe('Page title service', () => {
let pts: PageTitleService;
let titleService: Title;
beforeEach(() => {
TestBed.configureTestingModule({
providers: [PageTitleService, Title]
});
titleService = TestBed.get(Title);
pts = TestBed.get(PageTitleService);
});
it('should correctly set the page title', () => {
pts.setPageTitle('First Title');
expect(titleService.getTitle()).toEqual('First Title');
pts.setPageTitle('Second Title');
expect(titleService.getTitle()).toEqual('Second Title');
});
it('should correctly set the page title for mobile view', () => {
pts.setPageTitleForMobileView('First Title');
expect(pts.getPageTitleForMobileView()).toEqual('First Title');
pts.setPageTitleForMobileView('Second Title');
expect(pts.getPageTitleForMobileView()).toEqual('Second Title');
});
it('should correctly set the page subtitle for mobile view', () => {
pts.setPageSubtitleForMobileView('First Subtitle');
expect(pts.getPageSubtitleForMobileView()).toEqual('First Subtitle');
pts.setPageSubtitleForMobileView('Second Subtitle');
expect(pts.getPageSubtitleForMobileView()).toEqual('Second Subtitle');
});
});<|fim▁end|> | // You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0 |
<|file_name|>main.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
#
# jQuery File Upload Plugin GAE Python Example 2.1.1
# https://github.com/blueimp/jQuery-File-Upload
#
# Copyright 2011, Sebastian Tschan
# https://blueimp.net
#
# Licensed under the MIT license:
# http://www.opensource.org/licenses/MIT
#
from __future__ import with_statement
<|fim▁hole|>from google.appengine.api import files, images
from google.appengine.ext import blobstore, deferred
from google.appengine.ext.webapp import blobstore_handlers
import json
import re
import urllib
import webapp2
WEBSITE = 'http://blueimp.github.io/jQuery-File-Upload/'
MIN_FILE_SIZE = 1 # bytes
MAX_FILE_SIZE = 5000000 # bytes
IMAGE_TYPES = re.compile('image/(gif|p?jpeg|(x-)?png)')
ACCEPT_FILE_TYPES = IMAGE_TYPES
THUMBNAIL_MODIFICATOR = '=s80' # max width / height
EXPIRATION_TIME = 300 # seconds
def cleanup(blob_keys):
blobstore.delete(blob_keys)
class UploadHandler(webapp2.RequestHandler):
def initialize(self, request, response):
super(UploadHandler, self).initialize(request, response)
self.response.headers['Access-Control-Allow-Origin'] = '*'
self.response.headers[
'Access-Control-Allow-Methods'
] = 'OPTIONS, HEAD, GET, POST, PUT, DELETE'
self.response.headers[
'Access-Control-Allow-Headers'
] = 'Content-Type, Content-Range, Content-Disposition'
def validate(self, file):
if file['size'] < MIN_FILE_SIZE:
file['error'] = 'File is too small'
elif file['size'] > MAX_FILE_SIZE:
file['error'] = 'File is too big'
elif not ACCEPT_FILE_TYPES.match(file['type']):
file['error'] = 'Filetype not allowed'
else:
return True
return False
def get_file_size(self, file):
file.seek(0, 2) # Seek to the end of the file
size = file.tell() # Get the position of EOF
file.seek(0) # Reset the file position to the beginning
return size
def write_blob(self, data, info):
blob = files.blobstore.create(
mime_type=info['type'],
_blobinfo_uploaded_filename=info['name']
)
with files.open(blob, 'a') as f:
f.write(data)
files.finalize(blob)
return files.blobstore.get_blob_key(blob)
def handle_upload(self):
results = []
blob_keys = []
for name, fieldStorage in self.request.POST.items():
if type(fieldStorage) is unicode:
continue
result = {}
result['name'] = re.sub(
r'^.*\\',
'',
fieldStorage.filename
)
result['type'] = fieldStorage.type
result['size'] = self.get_file_size(fieldStorage.file)
if self.validate(result):
blob_key = str(
self.write_blob(fieldStorage.value, result)
)
blob_keys.append(blob_key)
result['deleteType'] = 'DELETE'
result['deleteUrl'] = self.request.host_url +\
'/?key=' + urllib.quote(blob_key, '')
if (IMAGE_TYPES.match(result['type'])):
try:
result['url'] = images.get_serving_url(
blob_key,
secure_url=self.request.host_url.startswith(
'https'
)
)
result['thumbnailUrl'] = result['url'] +\
THUMBNAIL_MODIFICATOR
except: # Could not get an image serving url
pass
if 'url' not in result:
result['url'] = self.request.host_url +\
'/' + blob_key + '/' + urllib.quote(
result['name'].encode('utf-8'), '')
results.append(result)
deferred.defer(
cleanup,
blob_keys,
_countdown=EXPIRATION_TIME
)
return results
def options(self):
pass
def head(self):
pass
def get(self):
self.redirect(WEBSITE)
def post(self):
if (self.request.get('_method') == 'DELETE'):
return self.delete()
result = {'files': self.handle_upload()}
s = json.dumps(result, separators=(',', ':'))
redirect = self.request.get('redirect')
if redirect:
return self.redirect(str(
redirect.replace('%s', urllib.quote(s, ''), 1)
))
if 'application/json' in self.request.headers.get('Accept'):
self.response.headers['Content-Type'] = 'application/json'
self.response.write(s)
def delete(self):
key = self.request.get('key') or ''
blobstore.delete(key)
s = json.dumps({key: True}, separators=(',', ':'))
if 'application/json' in self.request.headers.get('Accept'):
self.response.headers['Content-Type'] = 'application/json'
self.response.write(s)
class DownloadHandler(blobstore_handlers.BlobstoreDownloadHandler):
def get(self, key, filename):
if not blobstore.get(key):
self.error(404)
else:
# Prevent browsers from MIME-sniffing the content-type:
self.response.headers['X-Content-Type-Options'] = 'nosniff'
# Cache for the expiration time:
self.response.headers['Cache-Control'] = 'public,max-age=%d' % EXPIRATION_TIME
# Send the file forcing a download dialog:
self.send_blob(key, save_as=filename, content_type='application/octet-stream')
app = webapp2.WSGIApplication(
[
('/', UploadHandler),
('/([^/]+)/([^/]+)', DownloadHandler)
],
debug=True
)<|fim▁end|> | |
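The `get_file_size` helper above uses the classic seek/tell idiom: seek to the end of the file (whence=2), read the current offset, which equals the size in bytes, then rewind before the real read. The same idiom in isolation:

```python
# The seek/tell file-size idiom used by get_file_size above.
import io
import os

f = io.BytesIO(b'hello world')
f.seek(0, os.SEEK_END)  # jump to EOF (same as whence=2)
size = f.tell()         # offset at EOF == size in bytes
f.seek(0)               # rewind so the content can still be read
assert size == 11
```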
<|file_name|>test_max_column.py<|end_file_name|><|fim▁begin|>#!../../../.env/bin/python
import os
import numpy as np
import time
a = np.array([
[1,0,3],
[0,2,1],
[0.1,0,0],
])
print a
row = 1
col = 2
print a[row][col]
assert a[row][col] == 1
<|fim▁hole|>print 'expected_max_rows:', expected_max_rows
print 'expected_max_values:', expected_max_values
t0 = time.time()
actual_max_rows = list(np.argmax(a, axis=0))
td = time.time() - t0
actual_max_values = list(np.amax(a, axis=0))
print 'td:', round(td, 4)
print 'actual_max_rows:', actual_max_rows
print 'actual_max_values:', actual_max_values
assert actual_max_rows == expected_max_rows
assert actual_max_values == expected_max_values<|fim▁end|> | expected_max_rows = [0, 1, 0]
expected_max_values = [1, 2, 3] |
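The script checks column-wise maxima: with `axis=0`, `np.argmax` returns, for each column, the row index of its largest entry, and `np.amax` returns the values themselves. A Python 3 restatement of the same check, assuming only NumPy:

```python
# Python 3 sketch of the same column-wise max check as the script above.
import numpy as np

a = np.array([[1, 0, 3],
              [0, 2, 1],
              [0.1, 0, 0]])
assert list(np.argmax(a, axis=0)) == [0, 1, 0]  # row index of each column max
assert list(np.amax(a, axis=0)) == [1, 2, 3]    # the column maxima themselves
```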
<|file_name|>say.py<|end_file_name|><|fim▁begin|>from command import Command, is_command
from event import Event
class Say(Command):
shortname = 'say'
name = 'Say something to someone, or in the public chat'
@is_command
def say(self, player, *args):
if args[0] in self.world.players:
prefix = "(private) <%s> " % player.name
# a message to a user
msg_base = ' '.join(args[1:])
msg = prefix + ' '.join(args[1:])
target_player = self.find_player(args[0])
self.tell_player(args[0], msg)
self.world.emit_scripting_event('say', {
'source': player.to_dict(),
'target': target_player.to_dict(),
'msg': msg_base
}, scope=[target_player])
else:
prefix = "<%s> " % player.name
msg_base = ' '.join(args)
msg = prefix + ' '.join(args)<|fim▁hole|> self.world.emit_scripting_event('say', {
'source': player.to_dict(),
'target': player.location.to_dict(),
'msg': msg_base
}, scope=[player.location, player])<|fim▁end|> |
for p in self.world.players:
self.tell_player(p, msg) |
<|file_name|>InformationDispatcher.py<|end_file_name|><|fim▁begin|>from vt_manager.controller.actions.ActionController import ActionController
from vt_manager.controller.drivers.VTDriver import VTDriver
from vt_manager.models.Action import Action
from vt_manager.models.VirtualMachine import VirtualMachine
import xmlrpclib, threading, logging, copy
from vt_manager.communication.utils.XmlHelper import XmlHelper
from vt_manager.models.resourcesHash import resourcesHash
class InformationDispatcher():
@staticmethod
def listResources(remoteHashValue, projectUUID = 'None', sliceUUID ='None'):
logging.debug("Enter listResources")
infoRspec = XmlHelper.getSimpleInformation()
servers = VTDriver.getAllServers()
baseVM = copy.deepcopy(infoRspec.response.information.resources.server[0].virtual_machine[0])
if not servers:
logging.debug("No VTServers available")
infoRspec.response.information.resources.server.pop()
resourcesString = XmlHelper.craftXmlClass(infoRspec)
localHashValue = str(hash(resourcesString))
else:
for sIndex, server in enumerate(servers):
if(sIndex == 0):
baseServer = copy.deepcopy(infoRspec.response.information.resources.server[0])
if(sIndex != 0):
newServer = copy.deepcopy(baseServer)
infoRspec.response.information.resources.server.append(newServer)
InformationDispatcher.__ServerModelToClass(server, infoRspec.response.information.resources.server[sIndex] )
if (projectUUID != 'None'):
vms = server.getVMs(projectId = projectUUID)
else:
vms = server.getVMs()
if not vms:
logging.debug("No VMs available")
if infoRspec.response.information.resources.server[sIndex].virtual_machine:
infoRspec.response.information.resources.server[sIndex].virtual_machine.pop()
elif (sliceUUID != 'None'):
vms = vms.filter(sliceId = sliceUUID)
if not vms:
logging.error("No VMs available")
infoRspec.response.information.resources.server[sIndex].virtual_machine.pop()
for vIndex, vm in enumerate(vms):
if (vIndex != 0):
newVM = copy.deepcopy(baseVM)
infoRspec.response.information.resources.server[sIndex].virtual_machine.append(newVM)
InformationDispatcher.__VMmodelToClass(vm, infoRspec.response.information.resources.server[sIndex].virtual_machine[vIndex])
resourcesString = XmlHelper.craftXmlClass(infoRspec)
localHashValue = str(hash(resourcesString))
try:
rHashObject = resourcesHash.objects.get(projectUUID = projectUUID, sliceUUID = sliceUUID)
rHashObject.hashValue = localHashValue
rHashObject.save()
except:
rHashObject = resourcesHash(hashValue = localHashValue, projectUUID= projectUUID, sliceUUID = sliceUUID)
rHashObject.save()
if remoteHashValue == rHashObject.hashValue:
return localHashValue, ''
else:
return localHashValue, resourcesString
@staticmethod
def listVMTemplatesInfo(serverUUID):
#def listVMTemplatesInfo(serverUUID, callbackURL):
logging.debug("Enter listVMTemplatesInfo")
server = VTDriver.getServerByUUID(serverUUID)<|fim▁hole|> templates_info = xmlrpc_server.list_vm_templates(server.getAgentPassword())
#templates_info = xmlrpc_server.list_vm_templates(callbackURL, server.getAgentPassword())
return str(templates_info)
@staticmethod
def forceListActiveVMs(serverID='None', vmID='None'):
if serverID != 'None':
server = VTDriver.getServerById(serverID)
vtam_vms = server.getVMs()
else:
if vmID != 'None':
servers = VTDriver.getAllServers()
vtam_vms = list()
for server in servers:
vtam_vms = server.getVMs(id=int(vmID))
if vtam_vms:
vmID = vtam_vms[0].getUUID()
break
if not vtam_vms:
raise Exception("VM not found")
xmlrpc_server = xmlrpclib.Server(server.getAgentURL())
# Handle safely the connection against the agent
try:
server_active_vms = xmlrpc_server.force_list_active_vms(server.getAgentPassword(), vmID)
for vm in vtam_vms:
if vm.getUUID() in server_active_vms.keys():
vm.setState("running")
vm.save()
else:
# XXX: avoiding "on queue" and "unknown" states to avoid bad management
#if vm.getState() in ['deleting...', 'failed', 'on queue', 'unknown']:
if vm.getState() in ["deleting...", "failed"]:
child = vm.getChildObject()
server = vm.Server.get()
#Action.objects.all().filter(objectUUID = vm.uuid).delete()
server.deleteVM(vm)
# Keep actions table up-to-date after each deletion
vm_uuids = [ vm.uuid for vm in VirtualMachine.objects.all() ]
Action.objects.all().exclude(objectUUID__in = vm_uuids).delete()
elif vm.getState() in ["running", "starting...", "stopping..."] :
vm.setState("stopped")
vm.save()
else:
continue
except:
server_active_vms = dict()
return server_active_vms
@staticmethod
def __ServerModelToClass(sModel, sClass ):
sClass.name = sModel.getName()
#XXX: CHECK THIS
sClass.id = sModel.id
sClass.uuid = sModel.getUUID()
sClass.operating_system_type = sModel.getOSType()
sClass.operating_system_distribution = sModel.getOSDistribution()
sClass.operating_system_version = sModel.getOSVersion()
sClass.virtualization_type = sModel.getVirtTech()
ifaces = sModel.getNetworkInterfaces()
for ifaceIndex, iface in enumerate(ifaces):
if ifaceIndex != 0:
newInterface = copy.deepcopy(sClass.interfaces.interface[0])
sClass.interfaces.interface.append(newInterface)
if iface.isMgmt:
sClass.interfaces.interface[ifaceIndex].ismgmt = True
else:
sClass.interfaces.interface[ifaceIndex].ismgmt = False
sClass.interfaces.interface[ifaceIndex].name = iface.name
sClass.interfaces.interface[ifaceIndex].switch_id= iface.switchID
sClass.interfaces.interface[ifaceIndex].switch_port = iface.port
@staticmethod
def __VMmodelToClass(VMmodel, VMxmlClass):
VMxmlClass.name = VMmodel.getName()
VMxmlClass.uuid = VMmodel.getUUID()
VMxmlClass.status = VMmodel.getState()
VMxmlClass.project_id = VMmodel.getProjectId()
VMxmlClass.slice_id = VMmodel.getSliceId()
VMxmlClass.project_name = VMmodel.getProjectName()
VMxmlClass.slice_name = VMmodel.getSliceName()
VMxmlClass.operating_system_type = VMmodel.getOSType()
VMxmlClass.operating_system_version = VMmodel.getOSVersion()
VMxmlClass.operating_system_distribution = VMmodel.getOSDistribution()
VMxmlClass.virtualization_type = VMmodel.Server.get().getVirtTech()
VMxmlClass.server_id = VMmodel.Server.get().getUUID()
VMxmlClass.xen_configuration.hd_setup_type = VMmodel.getHdSetupType()
VMxmlClass.xen_configuration.hd_origin_path = VMmodel.getHdOriginPath()
VMxmlClass.xen_configuration.virtualization_setup_type = VMmodel.getVirtualizationSetupType()
VMxmlClass.xen_configuration.memory_mb = VMmodel.getMemory()
ActionController.PopulateNetworkingParams(VMxmlClass.xen_configuration.interfaces.interface, VMmodel)<|fim▁end|> | xmlrpc_server = xmlrpclib.Server(server.getAgentURL()) |
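`listResources` implements a cheap change-detection handshake: the caller sends the hash of the resource list it last saw, the server recomputes the hash of the freshly crafted XML, and the full payload is returned only when the two differ. The skeleton of that protocol, reduced to a sketch:

```python
# Minimal sketch of the hash-based change detection used by listResources:
# the full payload travels only when the caller's copy is stale.
def list_resources(remote_hash, build_payload):
    payload = build_payload()      # e.g. the crafted resources XML
    local_hash = str(hash(payload))
    if remote_hash == local_hash:
        return local_hash, ''      # caller is already up to date
    return local_hash, payload     # caller must refresh
```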
<|file_name|>mcbd.py<|end_file_name|><|fim▁begin|><|fim▁hole|>Usage: python3 mcbd.py save <keyword> - saves clipboard for keyword.
python3 mcbd.py <keyword> - loads to clipboard for keyword.
python3 mcbd.py list - loads all keywords to clipboard.
python3 mcbd.py delete <keyword> - deletes for keyword.
python3 mcbd.py delete - deletes all keywords.
'''
'''
Say you have the boring task of filling out many forms in a web page or
software with several text fields. The clipboard saves you from typing
the same text over and over again. But only one thing can be on the
clipboard at a time. If you have several different pieces of text that
you need to copy and paste, you have to keep highlighting and copying
the same few things over and over again. You can write a Python
program to keep track of multiple pieces of text.
'''
'''
Extend the multiclipboard program in this chapter so that it has a
delete <keyword> command line argument that will delete a keyword from
the shelf. Then add a delete command line argument that will delete all
keywords.
'''
import pyperclip
import shelve
import sys
import textwrap
def print_usage():
print(textwrap.dedent(
'''
Usage: python3 mcbd.py save <keyword> - saves clipboard for keyword.
python3 mcbd.py <keyword> - loads to clipboard for keyword.
python3 mcbd.py list - loads all keywords to clipboard.
python3 mcbd.py delete <keyword> - deletes for keyword.
python3 mcbd.py delete - deletes all keywords.
'''))
mcbShelf = shelve.open('mcb') # file created if not already existing
# save or delete specified keywords
if len(sys.argv) == 3:
if sys.argv[1].lower() == 'save':
mcbShelf[sys.argv[2]] = pyperclip.paste()
print('clipboard saved under keyword:', sys.argv[2])
elif sys.argv[1].lower() == 'delete':
del mcbShelf[sys.argv[2]]
print('deleted keyword:', sys.argv[2])
# list or delete all keywords or fetch one
elif len(sys.argv) == 2:
if sys.argv[1].lower() == 'list':
pyperclip.copy(str(list(mcbShelf.keys())))
print('all keywords copied to clipboard')
elif sys.argv[1].lower() == 'delete':
mcbShelf.clear()
print('all keywords deleted')
elif sys.argv[1] in mcbShelf:
pyperclip.copy(mcbShelf[sys.argv[1]])
print('copied to clipboard for keyword:', sys.argv[1])
else:
print('no such keyword:', sys.argv[1])
print_usage()
else:
print_usage()
mcbShelf.close()<|fim▁end|> | #! python3
'''
mcbd.py - Saves and loads pieces of text from/to the clipboard to/from a
shelf type file. |
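Everything in mcbd.py hinges on `shelve`, which behaves like a dict whose contents persist on disk between runs. A minimal sketch of that behaviour (the file name here is illustrative):

```python
# Minimal sketch of the shelve persistence mcbd.py relies on.
import shelve

with shelve.open('mcb_demo') as shelf:  # file name is illustrative
    shelf['greeting'] = 'hello'         # written through to disk
with shelve.open('mcb_demo') as shelf:  # a fresh session still sees it
    assert shelf['greeting'] == 'hello'
```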
<|file_name|>helpers.js<|end_file_name|><|fim▁begin|>import {mount, render, shallow} from 'enzyme';
global.mount = mount;<|fim▁hole|><|fim▁end|> | global.render = render;
global.shallow = shallow; |
<|file_name|>forgotpasswordpin.js<|end_file_name|><|fim▁begin|>define([], function () {
function processForgotPasswordResult(result) {
if (result.Success) {
var msg = Globalize.translate('MessagePasswordResetForUsers');
msg += '<br/>';
msg += '<br/>';
msg += result.UsersReset.join('<br/>');
Dashboard.alert({
message: msg,
title: Globalize.translate('HeaderPasswordReset'),
callback: function () {
window.location.href = 'login.html';
}
});
return;
}
Dashboard.alert({
<|fim▁hole|> }
return function (view, params) {
function onSubmit(e) {
ApiClient.ajax({
type: 'POST',
url: ApiClient.getUrl('Users/ForgotPassword/Pin'),
dataType: 'json',
data: {
Pin: view.querySelector('#txtPin').value
}
}).then(processForgotPasswordResult);
e.preventDefault();
return false;
}
view.querySelector('form').addEventListener('submit', onSubmit);
};
});<|fim▁end|> | message: Globalize.translate('MessageInvalidForgotPasswordPin'),
title: Globalize.translate('HeaderPasswordReset')
});
return; |
<|file_name|>GfshParseResult.java<|end_file_name|><|fim▁begin|>/*
* Licensed to the Apache Software Foundation (ASF) under one or more contributor license
* agreements. See the NOTICE file distributed with this work for additional information regarding
* copyright ownership. The ASF licenses this file to You under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance with the License. You may obtain a
* copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software distributed under the License
* is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
* or implied. See the License for the specific language governing permissions and limitations under
* the License.
*/
package org.apache.geode.management.internal.cli;
import java.lang.annotation.Annotation;
import java.lang.reflect.Method;
import java.util.Collections;
import java.util.HashMap;
import java.util.Map;
import org.apache.commons.lang.StringUtils;
import org.springframework.shell.core.annotation.CliCommand;
import org.springframework.shell.core.annotation.CliOption;
import org.springframework.shell.event.ParseResult;
import org.apache.geode.management.cli.CliMetaData;
import org.apache.geode.management.internal.cli.shell.GfshExecutionStrategy;
import org.apache.geode.management.internal.cli.shell.OperationInvoker;
/**
* Immutable representation of the outcome of parsing a given shell line. Extends
* {@link ParseResult} to add a field to specify the command string that was input by the user.
*
* <p>
* Some commands are required to be executed on a remote GemFire managing member. These should be
* marked with the annotation {@link CliMetaData#shellOnly()} set to <code>false</code>.
* {@link GfshExecutionStrategy} will detect whether the command is a remote command and send it to
* ManagementMBean via {@link OperationInvoker}.
*
*
* @since GemFire 7.0
*/
public class GfshParseResult extends ParseResult {
private String userInput;
private String commandName;
private Map<String, String> paramValueStringMap = new HashMap<>();
/**
* Creates a GfshParseResult instance to represent parsing outcome.
*
* @param method Method associated with the command
* @param instance Instance on which this method has to be executed
* @param arguments arguments of the method
* @param userInput user specified commands string
*/
protected GfshParseResult(final Method method, final Object instance, final Object[] arguments,
final String userInput) {
super(method, instance, arguments);
this.userInput = userInput.trim();
CliCommand cliCommand = method.getAnnotation(CliCommand.class);
commandName = cliCommand.value()[0];
Annotation[][] parameterAnnotations = method.getParameterAnnotations();
if (arguments == null) {
return;
}
for (int i = 0; i < arguments.length; i++) {
Object argument = arguments[i];
if (argument == null) {
continue;
}
CliOption cliOption = getCliOption(parameterAnnotations, i);
String argumentAsString;
if (argument instanceof Object[]) {
argumentAsString = StringUtils.join((Object[]) argument, ",");
} else {
argumentAsString = argument.toString();<|fim▁hole|> paramValueStringMap.put(cliOption.key()[0], argumentAsString);
}
}
/**
* @return the userInput
*/
public String getUserInput() {
return userInput;
}
/**
* Used only in tests and command pre-execution for validating arguments
*/
public String getParamValue(String param) {
return paramValueStringMap.get(param);
}
/**
* Used only in tests and command pre-execution for validating arguments
*
* @return the unmodifiable paramValueStringMap
*/
public Map<String, String> getParamValueStrings() {
return Collections.unmodifiableMap(paramValueStringMap);
}
public String getCommandName() {
return commandName;
}
private CliOption getCliOption(Annotation[][] parameterAnnotations, int index) {
Annotation[] annotations = parameterAnnotations[index];
for (Annotation annotation : annotations) {
if (annotation instanceof CliOption) {
return (CliOption) annotation;
}
}
return null;
}
}<|fim▁end|> | }
// this map is used for easy access of option values in String form.
// It's used in tests and validation of option values in pre-execution |
<|file_name|>test_instrument.py<|end_file_name|><|fim▁begin|>import os
import json
from nose.tools import assert_equal
from .project import load_lsdsng
from .utils import temporary_file
SCRIPT_DIR = os.path.abspath(os.path.dirname(__file__))
def _test_load_store_instrument(source_lsdsng, lsdinst_path, original_index):
proj = load_lsdsng(source_lsdsng)
proj.song.instruments.import_from_file(0x2a, lsdinst_path)
target_instr = proj.song.instruments[0x2a]
original_instr = proj.song.instruments[original_index]
assert_equal(original_instr, target_instr)
with temporary_file() as tmpfile:
original_instr.export_to_file(tmpfile)
with open(tmpfile, 'r') as fp:
saved_inst = json.load(fp)
with open(lsdinst_path, 'r') as fp:
original_inst = json.load(fp)
assert_equal(original_inst, saved_inst)
def test_load_store_wave_instrument():
_test_load_store_instrument(
os.path.join(SCRIPT_DIR, 'test_data', 'UNTOLDST.lsdsng'),
os.path.join(SCRIPT_DIR, 'test_data', 'UNTOLDST_0x00_wave.lsdinst'),
0x00)
def test_load_store_pulse_instrument():
_test_load_store_instrument(
os.path.join(SCRIPT_DIR, 'test_data', 'UNTOLDST.lsdsng'),
os.path.join(SCRIPT_DIR, 'test_data', 'UNTOLDST_0x03_pulse.lsdinst'),
0x03)
def test_load_store_kit_instrument():
_test_load_store_instrument(
os.path.join(SCRIPT_DIR, 'test_data', 'UNTOLDST.lsdsng'),
os.path.join(SCRIPT_DIR, 'test_data', 'UNTOLDST_0x16_kit.lsdinst'),
0x16)
def test_load_store_noise_instrument():
_test_load_store_instrument(
os.path.join(SCRIPT_DIR, 'test_data', 'ANNARKTE.lsdsng'),
os.path.join(SCRIPT_DIR, 'test_data', 'ANNARKTE_0x06_noise.lsdinst'),
0x06)
def test_load_store_arduinoboy():<|fim▁hole|> _test_load_store_instrument(
os.path.join(SCRIPT_DIR, 'test_data', 'ARDBOYxx.lsdsng'),
os.path.join(SCRIPT_DIR, 'test_data', 'MIDI.lsdinst'),
0x01)<|fim▁end|> | |
<|file_name|>equals_height.js<|end_file_name|><|fim▁begin|>(function ($) {
$(document).ready(function() {
var highestCol = Math.max($('.first-menu .pane-content').height(),$('.middle-menu .pane-content').height(),$('.last-menu .pane-content').height());
$('.first-menu .pane-content, .middle-menu .pane-content, .last-menu .pane-content').height(highestCol);
});<|fim▁hole|><|fim▁end|> |
})(jQuery); |
<|file_name|>p05_delete_gl_entries_for_cancelled_vouchers.py<|end_file_name|><|fim▁begin|># Copyright (c) 2013, Web Notes Technologies Pvt. Ltd. and Contributors
# License: GNU General Public License v3. See license.txt
def execute():
import webnotes
entries = webnotes.conn.sql("""select voucher_type, voucher_no
from `tabGL Entry` group by voucher_type, voucher_no""", as_dict=1)
for entry in entries:
try:
cancelled_voucher = webnotes.conn.sql("""select name from `tab%s` where name = %s
and docstatus=2""" % (entry['voucher_type'], "%s"), entry['voucher_no'])
if cancelled_voucher:
webnotes.conn.sql("""delete from `tabGL Entry` where voucher_type = %s and
voucher_no = %s""", (entry['voucher_type'], entry['voucher_no']))
except:<|fim▁hole|> pass<|fim▁end|> | |
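The patch deliberately mixes two substitution mechanisms: the table name is spliced into the SQL via `%` string formatting (identifiers cannot be bound as query parameters), while the value keeps a literal `%s` placeholder for the driver to bind and escape. A sketch of the resulting query string, with illustrative values:

```python
# Sketch of the two-stage substitution used in the patch above.
voucher_type, voucher_no = 'Journal Voucher', 'JV-0001'  # illustrative values
query = ("select name from `tab%s` where name = %s and docstatus=2"
         % (voucher_type, "%s"))
# query == "select name from `tabJournal Voucher` where name = %s and docstatus=2"
# webnotes.conn.sql(query, voucher_no) then binds voucher_no safely.
```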
<|file_name|>models.py<|end_file_name|><|fim▁begin|>from datetime import datetime
from pymongo.connection import Connection
from django.db import models
from eventtracker.conf import settings
def get_mongo_collection():
"Open a connection to MongoDB and return the collection to use."
if settings.RIGHT_MONGODB_HOST:
connection = Connection.paired(
left=(settings.MONGODB_HOST, settings.MONGODB_PORT),
right=(settings.RIGHT_MONGODB_HOST, settings.RIGHT_MONGODB_PORT)
)
else:
connection = Connection(host=settings.MONGODB_HOST, port=settings.MONGODB_PORT)
return connection[settings.MONGODB_DB][settings.MONGODB_COLLECTION]
def save_event(collection, event, timestamp, params):
"Save the event in MongoDB collection"
collection.insert({
'event': event,
'timestamp': datetime.fromtimestamp(timestamp),
'params': params
})
class Event(models.Model):
"Dummy model for development."<|fim▁hole|> event = models.SlugField()
params = models.TextField()<|fim▁end|> | timestamp = models.DateTimeField(auto_now_add=True) |
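For reference, the two helpers above compose like this; the event name and params are made up, and the call assumes a reachable MongoDB configured via the settings shown above:

```python
# Illustrative use of the helpers above; event name and params are made up.
import time

collection = get_mongo_collection()  # paired or single connection per settings
save_event(collection, 'user_signup', time.time(), {'plan': 'free'})
```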
<|file_name|>TodoListDetail.js<|end_file_name|><|fim▁begin|>/**
* This is the controller file for "TodoDetail"
*
* @class Controller.TodoDetail
* @author Steven House
* @email [email protected]
*/
var args = arguments[0] || {};
var itemId = args.itemId || "";
// Include logging utility
var log = Alloy.Globals.log;
var id = args.id;
var todo = Alloy.Collections.ToDo;
var todoItem = todo.findWhere({ name: itemId });
log.info('[TodoDetail] : Opened Item', todoItem);
var moment = require('moment');
var galleryExists = false;
init();
/**
* Start the controller running
* @method init
* @return
*/
function init() {
setupNav();
addEventListeners();
//alert(todoItem.name);
var name = todoItem.get('name');
//$.labelTitle.text = todoItem.name.toUpperCase();
//$.labelTitle.text = todoItem.get("name");
/*
if (isDone()) {
log.debug('[TodoDetail] : Initializing : Completed');
$.viewDone.height = 44;
//$.viewPhoto.height = 0;
$.addClass($.viewDone, 'bgDarkGreen');
}
if (hasReminder()) {
log.debug('[TodoDetail] : Initializing : Completed');
$.viewAptTime.height = 44;
$.viewScheduleApt.height = 0;
$.addClass($.viewScheduleApt, 'bgDarkGreen');
var reminderDate = todoItem.get('reminderDateTime');
var dateText = moment.utc(reminderDate).fromNow();
$.labelReminder.text = dateText;
// + reminderDate;
}
if (hasPhoto()) {
createGallery();
galleryExists = true;
}
*/
// @TODO Figure out why this is needed. The nav widget should handle it
//$.windowTodoDetail.open();
}
/**
* Setup the Nav Bar
* @method setupNav
*/
function setupNav() {
if (Alloy.isTablet) {
return;
}
}
/**
* Add event listeners for the ListView.
* @method addEventListeners
* @return
*/
function addEventListeners() {
// Mark as Done
$.viewDone.addEventListener('click', done);
// Set Reminder
$.viewScheduleApt.addEventListener('click', setReminder);
// Capture a photo
$.viewPhoto.addEventListener('click', captureImage);
}
/**
* Handles the done click event listener
* @method done
* @return
*/
function done() {
log.event({
type: 'todo',
action: 'completed',
description: todoItem.get('name'),
eventId: todoItem.get('name')
});
$.addClass($.viewDone, 'bgDarkGreen');
//$.viewDone.height = 0;
//$.viewPhoto.height = 44;
todoItem.set({
complete: true,
completedDateTime: new Date().toISOString()
});
todoItem.save();
}
/**
* Description
* @method isDone
* @return CallExpression
*/
function isDone() {
return todoItem.get('status');
}
/**
* Checks if item has reminder and changes UI based on this
* @method hasReminder
*/
function hasReminder() {
return todoItem.get('reminderDateTime');
}
/**
* Checks if item has reminder and changes UI based on this
* @method hasReminder
*/
function hasPhoto() {
return todoItem.get('hasPhoto');
}
/**
* Invoke the calendar module to set a date
* @method setReminder
* @return
*/
function setReminder() {
log.debug('[TodoDetail] : setReminder');
if (Ti.Platform.osname === 'android') {
var now = new Date();
var month = now.getUTCMonth() + 1;
var day = now.getUTCDate();
var year = now.getUTCFullYear();
var Dialogs = require("yy.tidialogs");
// Create the dialog
// value property is priority
var picker = Dialogs.createDatePicker({
okButtonTitle: 'Set', // <-- optional, default "Done"
cancelButtonTitle: 'Cancel', // <-- optional, default "Cancel"
value: new Date(), // <-- optional
day: day, // <-- optional
month: month, // <-- optional - java/javascript month, i.e. August
year: year // <-- optional
});
// Add the click listener
picker.addEventListener('click',function(e){
if (!e.cancel) {
saveDate(e.value);
} else {
// Android Cancel Date
}
});
// Cancel listener
picker.addEventListener('cancel', function() {
Ti.API.info("dialog was cancelled");
});
// open it
picker.show();
}
// iOS will use different date picker
else {
$.viewRow.height = 0;
var calendar = require('ti.sq');
var now = new Date();
var month = now.getUTCMonth() + 1;
var day = now.getUTCDate();
var year = now.getUTCFullYear();
var minYear = year - 1;
var maxYear = year + 1;
var calValue = {
month: month,
day: day,
year: year
};
var calMin = {
month: month,
day: day,
year: minYear
};
var calMax = {
month: month,
day: day,
year: maxYear
};
var calendarView = calendar.createView({
height: Ti.UI.FILL,
width: Ti.UI.FILL,
top: 0,
left: 10,
right: 10,
//bottom: 65,
pagingEnabled: true,
value: {
month: month,
day: day,
year: year
},
min: {
month: month,
day: 1,
year: year
},
max: {
month: month,
day: day,
year: maxYear
}
});
$.viewMain.add(calendarView);
calendarView.addEventListener('dateChanged', function(d) {
var opts = {
options: ['Yep!', 'Changed my mind'],
selectedIndex: 0,
destructive: 0,
title: 'Set A Reminder for ' + calendarView.value.month +
'/' + calendarView.value.day + '/' +
calendarView.value.year + ' ?'
};
var dialog = Ti.UI.createOptionDialog(opts);
dialog.show();
dialog.addEventListener('click', function(e) {
if (e.index == 0) {
saveDate(d.dateValue);
} else {
//Alloy.Globals.toast.show("Reminder cancelled");
alert("Reminder cancelled");
}
$.viewMain.remove(calendarView);
});
$.viewRow.height = Ti.UI.FILL;
});
}
}
/**
* @method saveDate
*/
function saveDate(d) {
log.debug("[TodoDetail] Set a reminder for : dateChanged = ", d);
var moment = require('moment');
log.event({
type: 'todo',
action: 'set a reminder for',
description: todoItem.get('name') + " " + moment(d).fromNow(),
eventId: todoItem.get('name')
});
todoItem.set({ reminderDateTime: d });
todoItem.save();
//Alloy.Globals.toast.show("Reminder set!");
alert("Reminder set!");
}
/**
* This invokes the camera
* @method captureImage
* @return
*/
function captureImage() {
log.debug('[TodoDetail] : captureImage');
var camera = require('Camera');
camera.captureImage({success: savePhoto});
}
/**
* Save a photo to the SD card
* @method savePhoto
*/<|fim▁hole|> if (image.mediaType == Ti.Media.MEDIA_TYPE_PHOTO) {
log.event({
type: 'todo',
action: 'captured',
description: 'an image for' + todoItem.get('name'),
eventId: todoItem.get('name')
});
log.debug('[TodoDetail] : captureImage : Camera Success, image = ', image);
// This part should be skipped to the existing function
var imageDir = Ti.Filesystem.getFile(Ti.Filesystem.applicationDataDirectory, 'todo');
if (!imageDir.exists()) {
imageDir.createDirectory();
}
// Add +1 to the existing photoCount
var photoCount = todoItem.get('photoCount') + 1;
var file = Ti.Filesystem.getFile(imageDir.resolve(), itemId +
photoCount + '.png');
log.debug("[TodoDetail] : Saving image to = ", imageDir.resolve() +
itemId + photoCount + '.png');
// Write to storage
file.write(image.media);
todoItem.set({
hasPhoto: true,
photoCount: photoCount
});
todoItem.save();
log.debug('[TodoDetail] : Saved image to this location : ',
file.nativePath);
updateGallery();
} else {
alert('We are only supporting images at the moment.');
todoItem.set({
hasVideo: true
});
todoItem.save();
}
}
/**
* This returns an imageView created from the file system
* @method getPictureView
* @param {photoCount}
* @param {width}
* @param {height}
* @return {Object} imageView
*/
function getPictureView(photoCount, width, height) {
log.debug('[TodoDetail] : getPictureView : photoCount = ',
photoCount + ", width = " + width + ", height = " + height);
// Create the directory if it doesn't exist
var imageDir = Ti.Filesystem.getFile(Ti.Filesystem.applicationDataDirectory, 'todo');
var file = Ti.Filesystem.getFile(imageDir.resolve(), itemId + photoCount + '.png');
if (!file.exists()) {
log.warn(
'[TodoDetail] : No saved pictures found. Should not see this'
);
return false;
} else {
var image = file.read();
log.info('[TodoDetail] : Retrieved saved picture : ',
image);
var imageView = Ti.UI.createImageView({
image: image,
width: width,
height: height,
borderColor: "white"
});
//$.viewMain.add(imageView);
return imageView;
}
}
/**
* Create Gallery of photos / videos
* @method createGallery
*/
function createGallery() {
log.debug('[TodoDetail] : createGallery() : image number = ', todoItem.get('photoCount'));
galleryExists = true;
var photoCount = todoItem.get('photoCount');
var images = [];
var columns = 0;
// Bail if no photos
if (photoCount < 1) {
log.debug("[TodoDetail] : createGallery : photoCount === 0");
return false;
} else if (photoCount == 1) {
columns = 1;
} else if (photoCount == 2) {
columns = 2;
} else {
columns = 3;
}
$.tdg.init({
columns: columns,
space: 5,
delayTime: 500,
gridBackgroundColor: '#e1e1e1',
itemBackgroundColor: '#9fcd4c',
itemBorderColor: '#6fb442',
itemBorderWidth: 0,
itemBorderRadius: 3
});
// For each photo count create a photo
_(photoCount).times(function(n) {
//THIS IS THE DATA THAT WE WANT AVAILABLE FOR THIS ITEM WHEN onItemClick OCCURS
var itemData = {
caption: 'Test'
};
var imageView = getPictureView(n + 1, 150, 150);
//NOW WE PUSH TO THE ARRAY THE VIEW AND THE DATA
images.push({
view: imageView,
data: itemData
});
});
//ADD ALL THE ITEMS TO THE GRID
$.tdg.addGridItems(images);
$.tdg.setOnItemClick(function(e){
alert('Selected Item: ' + JSON.stringify(e, null, 4));
});
}
/**
* Update the gallery and add a menu item
* @method updateGallery
*/
function updateGallery() {
log.debug("[TodoDetail] : Updating Gallery");
// If gallery doesn't exist create it
if (!galleryExists) {
createGallery();
return
}
// If gallery does exist add the first item
var imageView = getPictureView(1, 150, 150);
$.tdg.addGridItems({
view: imageView,
data: {
caption: 'Test'
}
});
}<|fim▁end|> | function savePhoto(image) { |
<|file_name|>Preferences.cpp<|end_file_name|><|fim▁begin|>/*
* PCLanChat, the decentralized chat client
* Copyright (C) 2017 Kuklin István
*
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, version 3 of the License.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this program. If not, see <http://www.gnu.org/licenses/>.
*/<|fim▁hole|>#include <QJsonDocument>
#include <QStandardPaths>
#include "Preferences.hpp"
#include "Network.hpp"
Preferences::Preferences() {
if(!getPreferencesFolder().mkpath(getPreferencesFolder().path())) {
QMessageBox messageBox;
messageBox.setText("Error accessing configuration directory!");
return;
} //creates the directory
preferencesFile.setFileName("Preferences.json");
preferencesDialog = new PreferencesDialog(this);
load();
}
Preferences::~Preferences() {
save();
delete preferencesDialog;
}
QDir Preferences::getPreferencesFolder() {
return QDir(QStandardPaths::standardLocations(QStandardPaths::AppConfigLocation).first());
}
void Preferences::openPreferences() {
preferencesDialog->ui.NicknameEdit->setText(QString::fromStdString(getValues().nickname));
preferencesDialog->ui.ListenCheckBox->setChecked(getValues().listen);
preferencesDialog->ui.SelfAdvertisingCheckBox->setChecked(getValues().selfAdvertising);
preferencesDialog->ui.buttonBox->button(QDialogButtonBox::Apply)->setEnabled(false);
preferencesDialog->show();
}
void Preferences::onApplied() {
preferencesDialog->ui.buttonBox->button(QDialogButtonBox::Apply)->setEnabled(false);
values.nickname = preferencesDialog->ui.NicknameEdit->text().toStdString();
values.listen = preferencesDialog->ui.ListenCheckBox->isChecked();
values.selfAdvertising = preferencesDialog->ui.SelfAdvertisingCheckBox->isChecked();
server->restart();
}
void Preferences::save() {
QJsonObject jsonObject = preferencesJsonDocument.object();
jsonObject["nickname"] = QString::fromStdString(getValues().nickname);
jsonObject["listen"] = getValues().listen;
jsonObject["selfAdvertising"] = getValues().selfAdvertising;
QDir::setCurrent(getPreferencesFolder().path());
preferencesFile.open(QIODevice::WriteOnly);
preferencesJsonDocument.setObject(jsonObject);
preferencesFile.write(preferencesJsonDocument.toJson());
preferencesFile.close();
}
void Preferences::load() {
QDir::setCurrent(getPreferencesFolder().path());
preferencesFile.open(QIODevice::ReadOnly);
preferencesJsonDocument = QJsonDocument::fromJson(preferencesFile.readAll());
preferencesFile.close();
QJsonObject jsonObject = preferencesJsonDocument.object();
values.nickname = jsonObject["nickname"].toString().toStdString(); //yeah looks stupid :)
values.listen = jsonObject["listen"].toBool();
values.selfAdvertising = jsonObject["selfAdvertising"].toBool();
}
PreferencesDialog::PreferencesDialog(Preferences *preferencesInput) {
preferences = preferencesInput;
ui.setupUi(this);
setFixedSize(352, 352);
connect(ui.buttonBox->button(QDialogButtonBox::Apply), SIGNAL(released()), preferences, SLOT(onApplied()));
}
void PreferencesDialog::bindPreferences(Preferences *preferencesInput) {
preferences = preferencesInput;
}
void PreferencesDialog::accept() {
preferences->onApplied();
hide();
}
void PreferencesDialog::reject() {
hide();
}<|fim▁end|> |
#include <cstdlib>
#include <QtWidgets> |
<|file_name|>main.cpp<|end_file_name|><|fim▁begin|>#include <iostream>
#include <zmf/AbstractModule.hpp>
#include "zsdn/StartupHelper.h"
#include "LinkDiscoveryModule.hpp"
#include <google/protobuf/stubs/common.h>
int main(int argc, char* argv[]) {
int returnCode;
if (zsdn::StartupHelper::paramsOkay(argc, argv)) {
zmf::logging::ZmfLogging::initializeLogging("LinkDiscoveryModule", argv[1]);<|fim▁hole|> }
google::protobuf::ShutdownProtobufLibrary();
return returnCode;
}<|fim▁end|> | returnCode = zsdn::StartupHelper::startInConsole(new LinkDiscoveryModule(0), argv[1]);
} else {
returnCode = 1; |
<|file_name|>storeclient.py<|end_file_name|><|fim▁begin|>#
# Copyright (c) 2010,2011,2012,2013 Big Switch Networks, Inc.
#
# Licensed under the Eclipse Public License, Version 1.0 (the
# "License"); you may not use this file except in compliance with the
# License. You may obtain a copy of the License at
#
# http://www.eclipse.org/legal/epl-v10.html
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied. See the License for the specific language governing
# permissions and limitations under the License.
#
#
# module: storeclient.py
#
# This module manages communication with the console, i.e. the REST interface
# of a Big Switch Controller node.
import urllib
import urllib2
import ftplib
import json
import datetime
import time
import traceback
import url_cache
class StringReader():
# used for ftp, as a replacement for read from an existing file
def __init__(self, value):
"""
Value can be a string, or a generator.
"""
self.value = value
self.offset = 0
if type(value) == str or type(value) == unicode:
self.len = len(value)
else:
self.last = None
def read(self, size = None):
if size:
if size > self.len - self.offset:
size = self.len - self.offset
result = self.value[self.offset:self.offset + size]
self.offset += size
return result
# supporting generators.
if self.last: # use remainder
if size > self.len - self.offset:
size = self.len - self.offset
result = self.last[self.offset:self.offset + size]
self.offset += size
if self.offset == self.len:
self.last = None
return result
item = self.value.next()
len_item = len(item)
if len_item <= size:
return item
# set up remainder
result = item[:size]
self.last = item[size:]
self.offset = 0
self.len = len(self.last)
return result
class StoreClient():
controller = None
display_rest = False
display_rest_reply = False
table_read_url = "http://%s/rest/v1/model/%s/"
entry_post_url = "http://%s/rest/v1/model/%s/"
user_data_url = "http://%s/rest/v1/data/"
sdn_platform_data_url = "http://%s/rest/v1/system/"
def set_controller(self,controller):
self.controller = controller
def display_mode(self, mode):
self.display_rest = mode
def display_reply_mode(self, mode):
self.display_rest_reply = mode
def set_sdn_controller_platform_rest_if(self, sdn_controller_rest_if):
url = self.sdn_platform_data_url % (self.controller)
url = url + "restifaddr/"
data = self.rest_post_request(url, sdn_controller_rest_if)
def rest_simple_request(self,url, use_cache = None, timeout = None):
# include a trivial retry mechanism ... other specific
# urllib2 exception types may need to be included
retry_count = 0
if use_cache == None or use_cache:
result = url_cache.get_cached_url(url)
if result != None:
return result
while retry_count > 0:
try:
return urllib2.urlopen(url, timeout = timeout).read()
except urllib2.URLError:
retry_count -= 1
time.sleep(1)
# try again without the try...
if self.display_rest:
print "REST-SIMPLE:", 'GET', url
result = urllib2.urlopen(url, timeout = timeout).read()
if self.display_rest_reply:
print 'REST-SIMPLE: %s reply "%s"' % (url, result)
url_cache.save_url(url, result)
return result
def rest_json_request(self, url):
entries = url_cache.get_cached_url(url)
if entries != None:
return entries
result = self.rest_simple_request(url)
# XXX check result
entries = json.loads(result)
url_cache.save_url(url, entries)
return entries
def rest_post_request(self, url, obj, verb='PUT'):
post_data = json.dumps(obj)
if self.display_rest:
print "REST-POST:", verb, url, post_data
request = urllib2.Request(url, post_data, {'Content-Type':'application/json'})
request.get_method = lambda: verb
response = urllib2.urlopen(request)
result = response.read()
if self.display_rest_reply:
print 'REST-POST: %s reply: "%s"' % (url, result)
return result
def get_table_from_store(self, table_name, key=None, val=None, match=None):
if not self.controller:
print "No controller specified. Set using 'controller <server:port>'."
return
url = self.table_read_url % (self.controller, table_name)
if not match:
match = "startswith"
if key and val:
url = "%s?%s__%s=%s" % (url, key, match, urllib.quote_plus(val))
result = url_cache.get_cached_url(url)
if result != None:
return result
data = self.rest_simple_request(url)
entries = json.loads(data)
url_cache.save_url(url, entries)
return entries
def get_object_from_store(self, table_name, pk_value):
if not self.controller:
print "No controller specified. Set using 'controller <server:port>'."
return
url = self.table_read_url % (self.controller, table_name)
url += (pk_value + '/')
result = url_cache.get_cached_url(url)
if result != None:
return result
if self.display_rest:
print "REST-MODEL:", url
response = urllib2.urlopen(url)
if response.code != 200:
# LOOK! Should probably raise exception here instead.
# In general we need to rethink the store interface and how
# we should use exceptions.
return None
data = response.read()
result = json.loads(data)
if self.display_rest_reply:
print 'REST-MODEL: %s reply: "%s"' % (url, result)
url_cache.save_url(url, result)
return result
# obj_data must contain a key/val and any other required data
def rest_create_object(self, obj_type, obj_data):
if not self.controller:
print "No controller specified. Set using 'controller <server:port>'."
return
url_cache.clear_cached_urls()
url = self.entry_post_url % (self.controller, obj_type)
data = self.rest_post_request(url, obj_data)
# LOOK! successful stuff should be returned in json too.
if data != "saved":
result = json.loads(data)<|fim▁hole|> def find_object_from_store(self, obj_type, key, val):
if not self.controller:
print "No controller specified. Set using 'controller <server:port>'."
return
url = self.table_read_url % (self.controller, obj_type)
result = url_cache.get_cached_url(url)
if result != None:
return result
data = self.rest_simple_request("%s?%s__exact=%s" % (url, key, urllib.quote_plus(val)))
entries = json.loads(data)
url_cache.save_url(url, entries)
return entries
def rest_query_objects(self, obj_type, query_params=None):
if not self.controller:
print "No controller specified. Set using 'controller <server:port>'."
return
url = self.table_read_url % (self.controller, obj_type)
if query_params:
url += '?'
# Convert any data:None fields to <id>__isnull=True
non_null_query_params = dict([[n,v] if v != None else [n + '__isnull', True]
for (n,v) in query_params.items()])
url += urllib.urlencode(non_null_query_params)
result = url_cache.get_cached_url(url)
if result != None:
return result
data = self.rest_simple_request(url)
entries = json.loads(data)
url_cache.save_url(url, entries)
return entries
#
# either must contain a key/val and any other required data
# of the key must be a dictionary identifying the item to delete.
def rest_delete_object(self, obj_type, key, val = None):
dict_ = {}
url = self.entry_post_url % (self.controller, obj_type)
if val == None:
if not type(key) == type(dict_):
return None
dict_ = key
else:
url += "?%s__exact=%s" % (key, urllib.quote_plus(val))
# LOOK! I'm not sure this works the way it seems to me it's
# designed to work. I think the intent is that you can specify
# query parameters in the key argument which controls which
# instance(s) should be deleted. But when I try it it seems to
# always delete all instances, so it seems like the parameters
# don't filter properly when passed via the POST data as opposed
# to being specified as query parameters in the URL. The latter
# way does work -- see rest_delete_objects that follows this.
data = self.rest_post_request(url, dict_, 'DELETE')
# LOOK! successful stuff should be returned in json too.
if data != "deleted":
dict_ = json.loads(data)
return dict_
url_cache.clear_cached_urls()
def rest_delete_objects(self, obj_type, query_params):
url = self.entry_post_url % (self.controller, obj_type)
if query_params:
url += '?'
# Convert any data:None fields to <id>__isnull=True
non_null_query_params = dict([[n,v] if v != None else [n + '__isnull', True]
for (n,v) in query_params.items()])
url += urllib.urlencode(non_null_query_params)
data = self.rest_post_request(url, {}, 'DELETE')
# LOOK! successful stuff should be returned in json too.
if data != "deleted":
result = json.loads(data)
return result
url_cache.clear_cached_urls()
def rest_update_object(self, obj_type, obj_key_name, obj_key_val, obj_data):
if not self.controller:
print "No controller specified. Set using 'controller <server:port>'."
return
url = self.entry_post_url % (self.controller, obj_type)
url += "?%s=%s" % (obj_key_name, urllib.quote_plus(obj_key_val)) # add a query string
data = self.rest_post_request(url, obj_data)
# LOOK! successful stuff should be returned in json too.
result = json.loads(data)
if result.get('description', '') != "saved":
return result
url_cache.clear_cached_urls()
def set_user_data_file(self, name, text):
url = self.user_data_url % (self.controller)
version = 1 # default
# find the latest version for a name
existing_data = self.get_user_data_table(name, "latest")
if len(existing_data) > 0: # should be at most 1, but just in case...
version = max([int(f['version']) for f in existing_data]) + 1 # LOOK! race?
length = len(text)
# LOOK! what to do about time in a distributed system!
timestamp = datetime.datetime.utcnow().strftime("%Y-%m-%d.%H:%M:%S")
url += "%s/timestamp=%s/version=%s/length=%s/" % (name, timestamp, version, length)
return self.copy_text_to_url(url, text)
def get_user_data_file(self, name):
url = self.user_data_url % (self.controller)
url += name + "/"
return self.rest_simple_request(url)
def delete_user_data_file(self, name):
url = self.user_data_url % (self.controller)
url += name + "/"
data = self.rest_post_request(url, {}, 'DELETE')
if data != "deleted":
result = json.loads(data)
return result
def get_user_data_table(self, name=None, show_version="latest"):
if not self.controller:
print "No controller specified. Set using 'controller <server:port>'."
return None
url = self.user_data_url % self.controller
if name:
url += "?name__startswith=%s" % name
data = self.rest_simple_request(url)
new_data = []
data = json.loads(data)
latest_versions = {} # dict of latest version per name
for d in data: # list of dicts
l = d['name'].split('/') # ex: startup/timestamp=2010-11-03.05:51:27/version=1/length=2038
nd = dict([item.split('=') for item in l[1:]])
nd['name'] = l[0]
nd['full_name'] = d['name']
new_data.append(nd)
if not nd['name'] in latest_versions or int(nd['version']) > int(latest_versions[nd['name']]):
latest_versions[nd['name']] = nd['version'] # initialize first time
# prune if needed to a name or a particular version
if name:
new_data = [ nd for nd in new_data if nd['name'].startswith(name) ]
if show_version == "latest":
new_data = [ nd for nd in new_data if not int(nd['version']) < int(latest_versions[nd['name']]) ]
elif show_version != "all":
new_data = [ nd for nd in new_data if nd['version'] == show_version ]
return new_data
# LOOK! looks a lot like a rest_post_request except we don't jsonify and we handle
# errors differently... refactor? Same with get_text and rest_simple_request
def copy_text_to_url(self, url, src_text, message = None):
post_data = src_text
if url.startswith('ftp://'):
url_suffix = url[6:]
user = 'anonymous'
password = ''
if url_suffix.find('@') != -1:
url_parts = url_suffix.split('@')
url_user_and_password = url_parts[0]
url_suffix = url_parts[1]
if url_user_and_password.find(':') != -1:
user_and_password = url_user_and_password.split(':')
user = user_and_password[0]
password = user_and_password[1]
else:
user = url_user_and_password
host = url_suffix
path = None
if url_suffix.find('/'):
url_suffix_parts = url_suffix.split('/')
host = url_suffix_parts[0]
path = url_suffix_parts[1]
ftp_target = ftplib.FTP(host, user, password)
ftp_target.storbinary('STOR %s' % path, StringReader(post_data))
# apparently, storbinary doesn't provide a return value
result = { "result" : "success" } # don't display any other error messages
else:
request = urllib2.Request(url, post_data, {'Content-Type':'text/plain'})
request.get_method = lambda: 'PUT'
if self.display_rest:
print "REST-TEXT-TO:", request
response = urllib2.urlopen(request)
result = response.read()
if self.display_rest_reply:
print 'REST-TEXT-TO: %s reply "%s"' % (request, result)
return result
def get_text_from_url(self, url):
if self.display_rest:
print "REST-TEXT-FROM:", url
result = urllib2.urlopen(url).read()
if self.display_rest_reply:
print 'REST-TEXT-FROM: %s result:"%s"' % (url, result)
return result<|fim▁end|> | return result
url_cache.clear_cached_urls()
|
<|file_name|>permissions.py<|end_file_name|><|fim▁begin|># Copyright (c) 2019, CRS4
#
# Permission is hereby granted, free of charge, to any person obtaining a copy of
# this software and associated documentation files (the "Software"), to deal in
# the Software without restriction, including without limitation the rights to
# use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of
# the Software, and to permit persons to whom the Software is furnished to do so,
# subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in all
# copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS
# FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR
# COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER
# IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN
# CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
from rest_framework import permissions
from promort.settings import DEFAULT_GROUPS
<|fim▁hole|> """
Only specific users that belong to ODIN_MEMBERS group will be allowed
to perform queries using Odin toolkit
"""
RESTRICTED_METHODS = ['GET']
def has_permission(self, request, view):
if not (request.user and request.user.is_authenticated()):
return False
else:
if request.method in self.RESTRICTED_METHODS:
if request.user.groups.filter(
name__in=[DEFAULT_GROUPS['odin_members']['name']]
).exists():
return True
else:
return False
else:
return False<|fim▁end|> | class CanEnterGodMode(permissions.BasePermission): |
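A DRF permission class like `CanEnterGodMode` only takes effect once attached to a view. A hedged sketch of the wiring — the view name is made up; `permission_classes` is the standard DRF hook:

```python
# Sketch: attaching the permission above to a DRF view (view name is made up).
from rest_framework.views import APIView

class OdinQueryView(APIView):
    permission_classes = (CanEnterGodMode,)

    def get(self, request, format=None):
        # Only authenticated members of the ODIN_MEMBERS group get here,
        # and only for GET (the class's RESTRICTED_METHODS).
        ...
```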
<|file_name|>driver.py<|end_file_name|><|fim▁begin|>"""
Dummy Salesforce driver that simulates some parts of DB API 2
https://www.python.org/dev/peps/pep-0249/
should be independent on Django.db
and if possible should be independent on django.conf.settings
Code at lower level than DB API should be also here.
"""
from collections import namedtuple
import requests
import socket
from django.conf import settings
from django.utils.six import PY3
try:
import beatbox
except ImportError:
beatbox = None
import logging
log = logging.getLogger(__name__)
apilevel = "2.0"
# threadsafety = ...
# uses '%s' style parameters
paramstyle = 'format'
API_STUB = '/services/data/v35.0'
request_count = 0 # global counter
# All error types described in DB API 2 are implemented the same way as in
# Django 1.6, otherwise some exceptions are not correctly reported in it.
class Error(Exception if PY3 else StandardError):
pass
class InterfaceError(Error):
pass
class DatabaseError(Error):
pass
class DataError(DatabaseError):
pass
class OperationalError(DatabaseError):
pass
class IntegrityError(DatabaseError):
pass
class InternalError(DatabaseError):
pass
class ProgrammingError(DatabaseError):
pass
class NotSupportedError(DatabaseError):
pass
class SalesforceError(DatabaseError):
"""
DatabaseError that usually gets detailed error information from SF response
in the second parameter, decoded from REST, that frequently need not to be
displayed.
"""
def __init__(self, message='', data=None, response=None, verbose=False):
DatabaseError.__init__(self, message)
self.data = data
self.response = response
self.verbose = verbose
if verbose:
log.info("Error (debug details) %s\n%s", response.text,
response.__dict__)
class Connection(object):
# close and commit can be safely ignored because everything is
# committed automatically and REST is stateles.
def close(self):
pass
def commit(self):
pass
def rollback(self):
log.info("Rollback is not implemented.")
# DB API function
def connect(**params):
return Connection()
# LOW LEVEL
def getaddrinfo_wrapper(host, port, family=socket.AF_INET, socktype=0, proto=0, flags=0):
"""Patched 'getaddrinfo' with default family IPv4 (enabled by settings IPV4_ONLY=True)"""
return orig_getaddrinfo(host, port, family, socktype, proto, flags)
# patch to IPv4 if required and not already patched by anything else
if getattr(settings, 'IPV4_ONLY', False) and socket.getaddrinfo.__module__ in ('socket', '_socket'):
log.info("Patched socket to IPv4 only")
orig_getaddrinfo = socket.getaddrinfo
# replace the original socket.getaddrinfo by our version
socket.getaddrinfo = getaddrinfo_wrapper
# ----<|fim▁hole|>
def handle_api_exceptions(url, f, *args, **kwargs):
"""Call REST API and handle exceptions
Params:
f: requests.get or requests.post...
        _cursor: optional cursor used to share debug information
"""
global request_count
# The 'verify' option is about verifying SSL certificates
kwargs_in = {'timeout': getattr(settings, 'SALESFORCE_QUERY_TIMEOUT', 3),
'verify': True}
kwargs_in.update(kwargs)
_cursor = kwargs_in.pop('_cursor', None)
log.debug('Request API URL: %s' % url)
request_count += 1
try:
response = f(url, *args, **kwargs_in)
        # TODO: some timeouts can rarely be raised as "SSLError: The read operation timed out"
except requests.exceptions.Timeout:
raise SalesforceError("Timeout, URL=%s" % url)
if response.status_code == 401:
# Unauthorized (expired or invalid session ID or OAuth)
data = response.json()[0]
        if data['errorCode'] == 'INVALID_SESSION_ID':
token = f.__self__.auth.reauthenticate()
            if 'headers' in kwargs:
kwargs['headers'].update(dict(Authorization='OAuth %s' % token))
try:
response = f(url, *args, **kwargs_in)
except requests.exceptions.Timeout:
raise SalesforceError("Timeout, URL=%s" % url)
if response.status_code in (200, 201, 204):
return response
    # TODO: Remove this verbose setting once the specific messages are tuned;
    # for now, verbose output is generally the more useful default.
# http://www.salesforce.com/us/developer/docs/api_rest/Content/errorcodes.htm
verbose = not getattr(getattr(_cursor, 'query', None), 'debug_silent', False)
# Errors are reported in the body
data = response.json()[0]
if response.status_code == 404: # ResourceNotFound
if (f.__func__.__name__ == 'delete') and data['errorCode'] in (
'ENTITY_IS_DELETED', 'INVALID_CROSS_REFERENCE_KEY'):
            # It is a delete command and the object is in the trash bin or
            # already completely deleted, or the Id could at best have been
            # valid for this type; the error is ignored, just like a delete
            # by a classic database query:
# DELETE FROM xy WHERE id = 'something_deleted_yet'
return None
else:
            # otherwise this Id can never be valid.
raise SalesforceError("Couldn't connect to API (404): %s, URL=%s"
% (response.text, url), data, response, verbose
)
    if data['errorCode'] in ('INVALID_FIELD', 'MALFORMED_QUERY',
                             'INVALID_FIELD_FOR_INSERT_UPDATE'):
        raise SalesforceError(data['message'], data, response, verbose)
    elif data['errorCode'] == 'METHOD_NOT_ALLOWED':
        raise SalesforceError('%s: %s' % (url, data['message']),
                              data, response, verbose)
# some kind of failed query
else:
raise SalesforceError('%s' % data, data, response, verbose)<|fim▁end|> | |
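# A minimal call sketch for handle_api_exceptions(); the instance URL and
# session id below are hypothetical. In real use `f` is a bound method of an
# authenticated session object, so the 401 branch can reach f.__self__.auth;
# plain requests.get covers the happy path.
def _handle_api_exceptions_sketch():
    url = 'https://na1.salesforce.com' + API_STUB + '/query?q=SELECT+Id+FROM+Account'
    response = handle_api_exceptions(
        url, requests.get,
        headers={'Authorization': 'OAuth <session-id>'})
    if response is not None:   # None is returned only for ignorable deletes
        return response.json()['records']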
<|file_name|>nb-carousel.controller.js<|end_file_name|><|fim▁begin|>/**
* Carousel controller
*
* @author Hein Bekker <[email protected]>
* @copyright (c) 2015 Hein Bekker
* @license http://www.gnu.org/licenses/agpl-3.0.txt AGPLv3
*/
(function (window, angular, undefined) {
'use strict';
angular
.module('nb.carousel')
.controller('nbCarouselController', nbCarouselController);
nbCarouselController.$inject = ['$scope', '$element', '$timeout', '$interval', '$animate', 'GSAP', '$window', 'nbWindow', '_'];
function nbCarouselController ($scope, $element, $timeout, $interval, $animate, GSAP, $window, nbWindow, _) {
/*jshint validthis: true */
var self = this;
var $$window = angular.element($window);
var deregister = [];
var currentInterval; // {Promise}
var deferGotoInterval; // {Promise}
var deferGotoIndex;
var deferGotoDirection;
var flags = {
skipAnimation: true, // {Boolean} Prevents slide transition during the first gotoIndex().
destroyed: false, // {Boolean} Whether the scope has been destroyed.
transitioning: false // {Boolean} Whether there is a transition in progress.
};
var oldSlide; // {Scope}
var newSlide; // {Scope}
var maxWidth = 0, maxHeight = 0;
$scope.complete = false; // {Boolean} Whether all slides have loaded or failed to load.
$scope.slides = [];
$scope.direction = self.direction = 'left';
$scope.currentIndex = -1;
$scope.isPlaying = self.isPlaying = false;
/**
*
* @param {int} index
* @returns {Boolean}
*/
$scope.isCurrentSlideIndex = function (index) {
return $scope.currentIndex === index;
};
/**
*
* @param {int} index
* @param {string} direction left, right
*/
$scope.gotoIndex = function (index, direction) {
cancelDeferGoto();
<|fim▁hole|> return;
}
oldSlide = $scope.slides[$scope.currentIndex];
newSlide = $scope.slides[index];
// Stop here if the slide is not loaded.
if (!newSlide.complete) {
// Periodically check if the slide is loaded, and then try gotoIndex() again.
deferGoto(index, direction);
return;
}
$animate.addClass(newSlide.$element, 'fade-in', angular.noop);
if (angular.isUndefined(direction)) {
direction = (index < $scope.currentIndex) ? 'left' : 'right';
}
$scope.direction = self.direction = direction;
$scope.currentIndex = index;
// Reset the timer when changing slides.
restartTimer();
if (flags.skipAnimation || $scope.noTransition) {
flags.skipAnimation = false;
gotoDone();
}
else {
$timeout(function () {
// Stop here if the scope has been destroyed.
if (flags.destroyed) {
return;
}
flags.transitioning = true;
// Force reflow.
var reflow = newSlide.$element[0].offsetWidth;
$animate.removeClass(oldSlide.$element, 'active', angular.noop);
$animate.addClass(newSlide.$element, 'active', gotoDone)
.then(function () {
flags.transitioning = false;
});
});
}
};
/**
* Callback function fired after transition has been completed.
*/
function gotoDone () {
// Stop here if the scope has been destroyed.
if (flags.destroyed) {
return;
}
if (oldSlide) {
oldSlide.$element.removeClass('active');
}
if (newSlide) {
newSlide.$element.addClass('active');
}
}
/**
*
* @param {int} index
* @param {string} direction left, right
*/
function deferGoto (index, direction) {
deferGotoIndex = index;
deferGotoDirection = direction;
deferGotoFn();
}
/**
* Periodically checks if a slide is loaded. If so, fires gotoIndex().
*/
function deferGotoFn () {
cancelDeferGoto();
if ($scope.slides[deferGotoIndex].complete) {
$scope.gotoIndex(deferGotoIndex, deferGotoDirection);
}
else {
deferGotoInterval = $interval(deferGotoFn, 50);
}
}
function cancelDeferGoto () {
if (deferGotoInterval) {
$interval.cancel(deferGotoInterval);
deferGotoInterval = null;
}
}
/**
* Go to previous slide.
*/
$scope.prev = function () {
var newIndex = $scope.currentIndex > 0 ? $scope.currentIndex - 1 : $scope.slides.length - 1;
$scope.gotoIndex(newIndex, 'left');
};
/**
* Go to next slide.
*/
$scope.next = function () {
var newIndex = $scope.currentIndex < $scope.slides.length - 1 ? $scope.currentIndex + 1 : 0;
$scope.gotoIndex(newIndex, 'right');
};
function restartTimer () {
cancelTimer();
var interval = +$scope.interval;
if (!isNaN(interval) && interval > 0) {
currentInterval = $interval(timerFn, interval);
}
}
function cancelTimer () {
if (currentInterval) {
$interval.cancel(currentInterval);
currentInterval = null;
}
}
function timerFn () {
var interval = +$scope.interval;
if (self.isPlaying && !isNaN(interval) && interval > 0) {
$scope.next();
}
else {
$scope.pause();
}
}
$scope.play = function () {
if (!self.isPlaying) {
$scope.isPlaying = self.isPlaying = true;
restartTimer();
}
};
$scope.pause = function () {
if (!$scope.noPause) {
$scope.isPlaying = self.isPlaying = false;
cancelTimer();
}
};
/**
*
* @param {Scope} slide Slide scope
* @param {DOM element} element Slide DOM element
*/
self.addSlide = function (slide, element) {
slide.$element = element;
$scope.slides.push(slide);
if ($scope.slides.length === 1 || slide.active) {
$scope.gotoIndex($scope.slides.length - 1);
				if ($scope.slides.length === 1) {
$scope.play();
}
}
else {
slide.active = false;
}
};
/**
*
* @param {Scope} slide
*/
self.removeSlide = function (slide) {
GSAP.TweenMax.killTweensOf(slide.$element);
var index = _.indexOf($scope.slides, slide);
$scope.slides.splice(index, 1);
if ($scope.slides.length > 0 && slide.active) {
if (index >= $scope.slides.length) {
$scope.gotoIndex(index - 1);
}
else {
$scope.gotoIndex(index);
}
}
else if ($scope.currentIndex > index) {
$scope.currentIndex--;
}
};
/**
* Checks if all the slides are loaded and sets the carousel load state.
*
* @param {Scope} slide
*/
self.setSlideComplete = function (slide) {
var length = $scope.slides.length;
var i = 0;
angular.forEach($scope.slides, function (slide) {
if (slide.complete) {
i++;
}
});
$scope.complete = (length === i);
};
/**
* Sets maximum width of slides (allows for slides of different sizes).
*
* @param {int} value
*/
self.setMaxWidth = function (value) {
if (value > maxWidth) {
maxWidth = value;
resize();
}
};
/**
* Sets maximum height of slides (allows for slides of different sizes).
*
* @param {int} value
*/
self.setMaxHeight = function (value) {
if (value > maxHeight) {
maxHeight = value;
resize();
}
};
/**
* Resizes carousel and slides.
*/
function resize (apply) {
if (maxWidth && maxHeight) {
var windowHeight = nbWindow.windowHeight() * 0.8;
var width = $element[0].scrollWidth;
var height = Math.min(windowHeight, maxHeight / maxWidth * width);
if (width && height) {
// Set height of carousel.
$element.css('height', height + 'px');
// Set width and height of slides.
angular.forEach($scope.slides, function (slide, index) {
slide.resize(width, height);
});
}
}
}
// Reset the timer when the interval property changes.
deregister.push($scope.$watch('interval', restartTimer));
// Gives the $animate service access to carousel properties.
deregister.push($scope.$watch('noTransition', function (value) {
self.noTransition = value;
}));
deregister.push($scope.$watch('transitionDuration', function (value) {
self.transitionDuration = value;
}));
deregister.push($scope.$watch('transitionEase', function (value) {
self.transitionEase = value;
}));
var onWindowResize = _.throttle(function () {
resize(true);
}, 60);
// On window resize, resize carousel and slides.
$$window.on('resize', onWindowResize);
$scope.$on('$destroy', function () {
flags.destroyed = true;
// Deregister watchers.
angular.forEach(deregister, function (fn) {
fn();
});
// Cancel deferred goto interval.
cancelDeferGoto();
// Cancel timer interval.
cancelTimer();
// Unbind window resize event listener.
$$window.off('resize', onWindowResize);
});
}
})(window, window.angular);<|fim▁end|> | // Stop here if there is a transition in progress or if the index has not changed.
if (flags.transitioning || $scope.currentIndex === index) { |
<|file_name|>math_ops.py<|end_file_name|><|fim▁begin|># Copyright 2015 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""## Arithmetic Operators
TensorFlow provides several operations that you can use to add basic arithmetic
operators to your graph.
@@add
@@sub
@@mul
@@div
@@truediv
@@floordiv
@@mod
@@cross
## Basic Math Functions
TensorFlow provides several operations that you can use to add basic
mathematical functions to your graph.
@@add_n
@@abs
@@neg
@@sign
@@inv
@@square
@@round
@@sqrt
@@rsqrt
@@pow
@@exp
@@log
@@ceil
@@floor
@@maximum
@@minimum
@@cos
@@sin
@@lbeta
@@lgamma
@@digamma
@@erf
@@erfc
@@squared_difference
@@igamma
@@igammac
@@zeta
@@polygamma
## Matrix Math Functions
TensorFlow provides several operations that you can use to add linear algebra
functions on matrices to your graph.
@@batch_matrix_diag
@@batch_matrix_diag_part
@@batch_matrix_band_part
@@diag
@@diag_part
@@trace
@@transpose
@@matmul
@@batch_matmul
@@matrix_determinant
@@batch_matrix_determinant
@@matrix_inverse
@@batch_matrix_inverse
@@cholesky
@@batch_cholesky
@@cholesky_solve
@@batch_cholesky_solve
@@self_adjoint_eig
@@batch_self_adjoint_eig
@@matrix_solve
@@batch_matrix_solve
@@matrix_triangular_solve
@@batch_matrix_triangular_solve
@@matrix_solve_ls
@@batch_matrix_solve_ls
## Complex Number Functions
TensorFlow provides several operations that you can use to add complex number
functions to your graph.
@@complex
@@complex_abs
@@conj
@@imag
@@real
@@fft
@@ifft
@@fft2d
@@ifft2d
@@fft3d
@@ifft3d
@@batch_fft
@@batch_ifft
@@batch_fft2d
@@batch_ifft2d
@@batch_fft3d
@@batch_ifft3d
## Reduction
TensorFlow provides several operations that you can use to perform
common math computations that reduce various dimensions of a tensor.
@@reduce_sum
@@reduce_prod
@@reduce_min
@@reduce_max
@@reduce_mean
@@reduce_all
@@reduce_any
@@accumulate_n
## Segmentation
TensorFlow provides several operations that you can use to perform common
math computations on tensor segments.
Here a segmentation is a partitioning of a tensor along
the first dimension, i.e. it defines a mapping from the first dimension onto
`segment_ids`. The `segment_ids` tensor should be the size of
the first dimension, `d0`, with consecutive IDs in the range `0` to `k`,
where `k<d0`.
In particular, a segmentation of a matrix tensor is a mapping of rows to
segments.
For example:
```python
c = tf.constant([[1,2,3,4], [-1,-2,-3,-4], [5,6,7,8]])
tf.segment_sum(c, tf.constant([0, 0, 1]))
==> [[0 0 0 0]
[5 6 7 8]]
```
@@segment_sum
@@segment_prod
@@segment_min
@@segment_max
@@segment_mean
@@unsorted_segment_sum
@@sparse_segment_sum
@@sparse_segment_mean
@@sparse_segment_sqrt_n
## Sequence Comparison and Indexing
TensorFlow provides several operations that you can use to add sequence
comparison and index extraction to your graph. You can use these operations to
determine sequence differences and determine the indexes of specific values in
a tensor.
@@argmin
@@argmax
@@listdiff
@@where
@@unique
@@edit_distance
@@invert_permutation
"""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import numpy as np
import six.moves
from tensorflow.python.client import graph_util
from tensorflow.python.framework import dtypes
from tensorflow.python.framework import ops
from tensorflow.python.framework import tensor_shape
from tensorflow.python.framework import tensor_util
from tensorflow.python.ops import array_ops
from tensorflow.python.ops import common_shapes
from tensorflow.python.ops import gen_data_flow_ops
from tensorflow.python.ops import gen_math_ops
from tensorflow.python.ops import gen_sparse_ops
from tensorflow.python.ops import gen_state_ops
from tensorflow.python.ops import state_ops
# go/tf-wildcard-import
# pylint: disable=wildcard-import
from tensorflow.python.ops.gen_math_ops import *
# pylint: enable=wildcard-import
# Aliases for some automatically-generated names.
argmax = gen_math_ops.arg_max
argmin = gen_math_ops.arg_min
linspace = gen_math_ops.lin_space
# pylint: disable=anomalous-backslash-in-string,protected-access
def abs(x, name=None):
"""Computes the absolute value of a tensor.
Given a tensor of real numbers `x`, this operation returns a tensor
containing the absolute value of each element in `x`. For example, if x is
an input element and y is an output element, this operation computes
\\\\(y = |x|\\\\).
See [`tf.complex_abs()`](#tf_complex_abs) to compute the absolute value of a complex
number.
Args:
x: A `Tensor` of type `float`, `double`, `int32`, or `int64`.
name: A name for the operation (optional).
Returns:
A `Tensor` the same size and type as `x` with absolute values.
"""
with ops.op_scope([x], name, "Abs") as name:
x = ops.convert_to_tensor(x, name="x")
if x.dtype in (dtypes.complex64, dtypes.complex128):
return gen_math_ops.complex_abs(x, Tout=x.dtype.real_dtype, name=name)
return gen_math_ops._abs(x, name=name)
def complex_abs(x, name=None):
r"""Computes the complex absolute value of a tensor.<|fim▁hole|> Given a tensor `x` of complex numbers, this operation returns a tensor of type
`float` or `double` that is the absolute value of each element in `x`. All
elements in `x` must be complex numbers of the form \\(a + bj\\). The
absolute value is computed as \\( \sqrt{a^2 + b^2}\\).
For example:
```
# tensor 'x' is [[-2.25 + 4.75j], [-3.25 + 5.75j]]
tf.complex_abs(x) ==> [5.25594902, 6.60492229]
```
Args:
x: A `Tensor` of type `complex64` or `complex128`.
name: A name for the operation (optional).
Returns:
A `Tensor` of type `float32` or `float64`.
"""
return gen_math_ops.complex_abs(x, Tout=x.dtype.real_dtype, name=name)
def scalar_mul(scalar, x):
"""Multiplies a scalar times a `Tensor` or `IndexedSlices` object.
Intended for use in gradient code which might deal with `IndexedSlices`
objects, which are easy to multiply by a scalar but more expensive to
multiply with arbitrary tensors.
Args:
scalar: A 0-D scalar `Tensor`. Must have known shape.
x: A `Tensor` or `IndexedSlices` to be scaled.
Returns:
`scalar * x` of the same type (`Tensor` or `IndexedSlices`) as `x`.
Raises:
ValueError: if scalar is not a 0-D `scalar`.
"""
scalar = ops.convert_to_tensor(scalar, dtype=x.dtype, name="scalar")
shape = scalar.get_shape()
if shape.ndims == 0:
if isinstance(x, ops.IndexedSlices):
return ops.IndexedSlices(scalar * x.values, x.indices, x.dense_shape)
else:
return scalar * x
else:
raise ValueError("Only scalar multiply works, got shape %s" % shape)
def pow(x, y, name=None):
"""Computes the power of one value to another.
Given a tensor `x` and a tensor `y`, this operation computes \\\\(x^y\\\\) for
corresponding elements in `x` and `y`. For example:
```
# tensor 'x' is [[2, 2], [3, 3]]
# tensor 'y' is [[8, 16], [2, 3]]
tf.pow(x, y) ==> [[256, 65536], [9, 27]]
```
Args:
x: A `Tensor` of type `float`, `double`, `int32`, `complex64`, or `int64`.
y: A `Tensor` of type `float`, `double`, `int32`, `complex64`, or `int64`.
name: A name for the operation (optional).
Returns:
A `Tensor`.
"""
with ops.op_scope([x], name, "Pow") as name:
return gen_math_ops._pow(x, y, name=name)
def complex(real, imag, name=None):
"""Converts two real numbers to a complex number.
Given a tensor `real` representing the real part of a complex number, and a
tensor `imag` representing the imaginary part of a complex number, this
operation returns complex numbers elementwise of the form \\(a + bj\\), where
*a* represents the `real` part and *b* represents the `imag` part.
The input tensors `real` and `imag` must have the same shape.
For example:
```
# tensor 'real' is [2.25, 3.25]
# tensor `imag` is [4.75, 5.75]
tf.complex(real, imag) ==> [[2.25 + 4.75j], [3.25 + 5.75j]]
```
Args:
real: A `Tensor`. Must be one of the following types: `float32`, `float64`.
imag: A `Tensor`. Must have the same type as `real`.
name: A name for the operation (optional).
Returns:
A `Tensor` of type `complex64` or `complex128`.
"""
real = ops.convert_to_tensor(real, name="real")
imag = ops.convert_to_tensor(imag, name="imag")
with ops.op_scope([real, imag], name, "Complex") as name:
input_types = (real.dtype, imag.dtype)
if input_types == (dtypes.float64, dtypes.float64):
Tout = dtypes.complex128
elif input_types == (dtypes.float32, dtypes.float32):
Tout = dtypes.complex64
else:
raise TypeError("Types of real and imag don't match: "
"{} {}".format(real.dtype.name, imag.dtype.name))
return gen_math_ops._complex(real, imag, Tout=Tout, name=name)
def real(input, name=None):
"""Returns the real part of a complex number.
Given a tensor `input` of complex numbers, this operation returns a tensor of
type `float` or `double` that is the real part of each element in `input`.
All elements in `input` must be complex numbers of the form \\(a + bj\\),
where *a* is the real part returned by this operation and *b* is the
imaginary part.
For example:
```
# tensor 'input' is [-2.25 + 4.75j, 3.25 + 5.75j]
tf.real(input) ==> [-2.25, 3.25]
```
Args:
input: A `Tensor`. Must be one of the following types: `complex64`,
`complex128`.
name: A name for the operation (optional).
Returns:
A `Tensor` of type `float` or `double`.
"""
with ops.op_scope([input], name, "Real") as name:
return gen_math_ops.real(input, Tout=input.dtype.real_dtype, name=name)
def imag(input, name=None):
"""Returns the imaginary part of a complex number.
Given a tensor `input` of complex numbers, this operation returns a tensor of
type `float` or `double` that is the imaginary part of each element in
`input`. All elements in `input` must be complex numbers of the form \\(a +
bj\\), where *a* is the real part and *b* is the imaginary part returned by
this operation.
For example:
```
# tensor 'input' is [-2.25 + 4.75j, 3.25 + 5.75j]
tf.imag(input) ==> [4.75, 5.75]
```
Args:
input: A `Tensor`. Must be one of the following types: `complex64`, `complex128`.
name: A name for the operation (optional).
Returns:
A `Tensor` of type `float` or `double`.
"""
with ops.op_scope([input], name, "Imag") as name:
return gen_math_ops.imag(input, Tout=input.dtype.real_dtype, name=name)
def round(x, name=None):
"""Rounds the values of a tensor to the nearest integer, element-wise.
For example:
```python
# 'a' is [0.9, 2.5, 2.3, -4.4]
tf.round(a) ==> [ 1.0, 3.0, 2.0, -4.0 ]
```
Args:
x: A `Tensor` of type `float` or `double`.
name: A name for the operation (optional).
Returns:
A `Tensor` of same shape and type as `x`.
"""
x = ops.convert_to_tensor(x, name="x")
if x.dtype.is_integer:
return x
else:
return gen_math_ops.floor(x + 0.5, name=name)
def cast(x, dtype, name=None):
"""Casts a tensor to a new type.
The operation casts `x` (in case of `Tensor`) or `x.values`
(in case of `SparseTensor`) to `dtype`.
For example:
```python
# tensor `a` is [1.8, 2.2], dtype=tf.float
tf.cast(a, tf.int32) ==> [1, 2] # dtype=tf.int32
```
Args:
x: A `Tensor` or `SparseTensor`.
dtype: The destination type.
name: A name for the operation (optional).
Returns:
A `Tensor` or `SparseTensor` with same shape as `x`.
Raises:
TypeError: If `x` cannot be cast to the `dtype`.
"""
with ops.op_scope([x], name, "Cast") as name:
if isinstance(x, ops.SparseTensor):
values_cast = cast(x.values, dtype, name=name)
return ops.SparseTensor(x.indices, values_cast, x.shape)
else:
# TODO(touts): Handle what Josh said.
#
# Could return ops.convert_to_tensor(x, dtype=dtype, ...) here, but that
# allows some conversions that cast() can't do, e.g. casting numbers to
# strings.
x = ops.convert_to_tensor(x, name="x")
if x.dtype.base_dtype == dtype:
return x
return gen_math_ops.cast(x, dtype, name=name)
def saturate_cast(value, dtype, name=None):
"""Performs a safe saturating cast of `value` to `dtype`.
This function casts the input to `dtype` without applying any scaling. If
there is a danger that values would over or underflow in the cast, this op
applies the appropriate clamping before the cast.
Args:
value: A `Tensor`.
dtype: The desired output `DType`.
name: A name for the operation (optional).
Returns:
`value` safely cast to `dtype`.
"""
# When casting to a type with smaller representable range, clamp.
# Note that this covers casting to unsigned types as well.
with ops.op_scope([value], name, "saturate_cast") as name:
value = ops.convert_to_tensor(value, name="value")
dtype = dtypes.as_dtype(dtype).base_dtype
if value.dtype.min < dtype.min:
value = gen_math_ops.maximum(value, ops.convert_to_tensor(
dtype.min, dtype=value.dtype, name="min"))
if value.dtype.max > dtype.max:
value = gen_math_ops.minimum(value, ops.convert_to_tensor(
dtype.max, dtype=value.dtype, name="max"))
return cast(value, dtype, name=name)
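# An illustrative NumPy analogue (not TensorFlow code) of the
# clamp-then-cast behaviour documented above: out-of-range values are
# clipped to the representable range before the cast.
def _saturate_cast_sketch():
  info = np.iinfo(np.uint8)
  value = np.array([-1.0, 300.0])
  return np.clip(value, info.min, info.max).astype(np.uint8)   # -> [0, 255]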
def to_float(x, name="ToFloat"):
"""Casts a tensor to type `float32`.
Args:
x: A `Tensor` or `SparseTensor`.
name: A name for the operation (optional).
Returns:
A `Tensor` or `SparseTensor` with same shape as `x` with type `float32`.
Raises:
TypeError: If `x` cannot be cast to the `float32`.
"""
return cast(x, dtypes.float32, name=name)
def to_double(x, name="ToDouble"):
"""Casts a tensor to type `float64`.
Args:
x: A `Tensor` or `SparseTensor`.
name: A name for the operation (optional).
Returns:
A `Tensor` or `SparseTensor` with same shape as `x` with type `float64`.
Raises:
TypeError: If `x` cannot be cast to the `float64`.
"""
return cast(x, dtypes.float64, name=name)
def to_int32(x, name="ToInt32"):
"""Casts a tensor to type `int32`.
Args:
x: A `Tensor` or `SparseTensor`.
name: A name for the operation (optional).
Returns:
A `Tensor` or `SparseTensor` with same shape as `x` with type `int32`.
Raises:
TypeError: If `x` cannot be cast to the `int32`.
"""
return cast(x, dtypes.int32, name=name)
def to_int64(x, name="ToInt64"):
"""Casts a tensor to type `int64`.
Args:
x: A `Tensor` or `SparseTensor`.
name: A name for the operation (optional).
Returns:
A `Tensor` or `SparseTensor` with same shape as `x` with type `int64`.
Raises:
TypeError: If `x` cannot be cast to the `int64`.
"""
return cast(x, dtypes.int64, name=name)
def to_bfloat16(x, name="ToBFloat16"):
"""Casts a tensor to type `bfloat16`.
Args:
x: A `Tensor` or `SparseTensor`.
name: A name for the operation (optional).
Returns:
A `Tensor` or `SparseTensor` with same shape as `x` with type `bfloat16`.
Raises:
TypeError: If `x` cannot be cast to the `bfloat16`.
"""
return cast(x, dtypes.bfloat16, name=name)
ops.Tensor._override_operator("__neg__", gen_math_ops.neg)
ops.Tensor._override_operator("__abs__", abs)
# __invert__ corresponds to the ~ operator. Here we follow the numpy
# convention, where ~ marks an elementwise bit-wise inverse. This is only
# implemented for boolean tensors and will throw a TypeError if used on
# non-boolean arrays.
ops.Tensor._override_operator("__invert__", gen_math_ops.logical_not)
def _OverrideBinaryOperatorHelper(func, op_name, clazz_object=ops.Tensor):
"""Register operators with different tensor and scalar versions.
If `clazz_object` is `SparseTensor`, assumes `func` takes `(sp_indices,
sp_values, sp_shape, dense)` and outputs `(new_sp_values)`.
Args:
func: the operator
op_name: name of the operator being overridden
clazz_object: class to override for. Either `Tensor` or `SparseTensor`.
"""
def binary_op_wrapper(x, y):
with ops.op_scope([x, y], None, op_name) as name:
if not isinstance(y, ops.SparseTensor):
y = ops.convert_to_tensor(y, dtype=x.dtype.base_dtype, name="y")
return func(x, y, name=name)
def binary_op_wrapper_sparse(sp_x, y):
with ops.op_scope([sp_x, y], None, op_name) as name:
y = ops.convert_to_tensor(y, dtype=sp_x.dtype.base_dtype, name="y")
return ops.SparseTensor(sp_x.indices, func(sp_x.indices, sp_x.values,
sp_x.shape, y, name=name),
sp_x.shape)
def r_binary_op_wrapper(y, x):
with ops.op_scope([x, y], None, op_name) as name:
x = ops.convert_to_tensor(x, dtype=y.dtype.base_dtype, name="x")
return func(x, y, name=name)
if clazz_object is ops.Tensor:
clazz_object._override_operator("__%s__" % op_name, binary_op_wrapper)
del binary_op_wrapper
clazz_object._override_operator("__r%s__" % op_name, r_binary_op_wrapper)
del r_binary_op_wrapper
else:
clazz_object._override_operator("__%s__" % op_name,
binary_op_wrapper_sparse)
del binary_op_wrapper_sparse
# Conversion table for __truediv__. None entries mean no conversion required.
_TRUEDIV_TABLE = {
dtypes.uint8: dtypes.float32,
dtypes.int8: dtypes.float32,
dtypes.int16: dtypes.float32,
dtypes.int32: dtypes.float64,
dtypes.int64: dtypes.float64,
dtypes.float16: None,
dtypes.float32: None,
dtypes.float64: None,
dtypes.complex64: None,
dtypes.complex128: None,
}
# NOTE: the support of "sparse (true)div dense" is currently not baked into
# "tf.(true_)div()". Until such an API decision is made, the supported usage is
# to explicitly use the "/" operator to invoke either truediv or div.
def _sparse_dense_truediv(sp_indices, sp_values, sp_shape, y, name=None):
"""Internal helper function for 'sp_t / dense_t'."""
with ops.op_scope([sp_indices, sp_values, sp_shape, y],
name, "truediv") as name:
sp_values = ops.convert_to_tensor(sp_values, name="sp_values")
y = ops.convert_to_tensor(y, name="y")
x_dtype = sp_values.dtype.base_dtype
y_dtype = y.dtype.base_dtype
if x_dtype != y_dtype:
raise TypeError("x and y must have the same dtype, got %r != %r" %
(x_dtype, y_dtype))
try:
dtype = _TRUEDIV_TABLE[x_dtype]
except KeyError:
raise TypeError("Invalid dtype %r in __truediv__" % x_dtype)
if dtype is not None:
sp_values = cast(sp_values, dtype)
y = cast(y, dtype)
return gen_sparse_ops.sparse_dense_cwise_div(sp_indices, sp_values,
sp_shape, y, name=name)
def truediv(x, y, name=None):
"""Divides x / y elementwise, always producing floating point results.
The same as `tf.div` for floating point arguments, but casts integer arguments
to floating point before dividing so that the result is always floating point.
This op is generated by normal `x / y` division in Python 3 and in Python 2.7
with `from __future__ import division`. If you want integer division that
rounds down, use `x // y` or `tf.floordiv`.
`x` and `y` must have the same numeric type. If the inputs are floating
point, the output will have the same type. If the inputs are integral, the
inputs are cast to `float32` for `int8` and `int16` and `float64` for `int32`
and `int64` (matching the behavior of Numpy).
Args:
x: `Tensor` numerator of numeric type.
y: `Tensor` denominator of numeric type.
name: A name for the operation (optional).
Returns:
`x / y` evaluated in floating point.
Raises:
TypeError: If `x` and `y` have different dtypes.
"""
with ops.op_scope([x, y], name, "truediv") as name:
x = ops.convert_to_tensor(x, name="x")
y = ops.convert_to_tensor(y, name="y")
x_dtype = x.dtype.base_dtype
y_dtype = y.dtype.base_dtype
if x_dtype != y_dtype:
raise TypeError("x and y must have the same dtype, got %r != %r" %
(x_dtype, y_dtype))
try:
dtype = _TRUEDIV_TABLE[x_dtype]
except KeyError:
raise TypeError("Invalid dtype %r in __truediv__" % x_dtype)
if dtype is not None:
x = cast(x, dtype)
y = cast(y, dtype)
return gen_math_ops.div(x, y, name=name)
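# A plain-Python illustration of the semantics truediv reproduces for
# integer inputs (this module itself imports division from __future__):
def _truediv_sketch():
  return 7 / 2, 7 // 2   # -> (3.5, 3): true division versus floor division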
def floordiv(x, y, name=None):
"""Divides `x / y` elementwise, rounding down for floating point.
The same as `tf.div(x,y)` for integers, but uses `tf.floor(tf.div(x,y))` for
floating point arguments so that the result is always an integer (though
possibly an integer represented as floating point). This op is generated by
`x // y` floor division in Python 3 and in Python 2.7 with
`from __future__ import division`.
Note that for efficiency, `floordiv` uses C semantics for negative numbers
(unlike Python and Numpy).
`x` and `y` must have the same type, and the result will have the same type
as well.
Args:
x: `Tensor` numerator of real numeric type.
y: `Tensor` denominator of real numeric type.
name: A name for the operation (optional).
Returns:
`x / y` rounded down (except possibly towards zero for negative integers).
Raises:
TypeError: If the inputs are complex.
"""
with ops.op_scope([x, y], name, "floordiv") as name:
x = ops.convert_to_tensor(x, name="x")
dtype = x.dtype
if dtype.is_floating:
return gen_math_ops.floor(gen_math_ops.div(x, y), name=name)
else:
if not dtype.is_integer:
raise TypeError("Expected floating point or integer, got %r" % dtype)
return gen_math_ops.div(x, y, name=name)
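# A plain-Python illustration of the rounding difference the note above
# warns about: Python floors toward negative infinity, while C-style
# integer division truncates toward zero.
def _floordiv_sketch():
  python_floor = -7 // 2       # -> -4 (floors toward -infinity)
  c_truncation = int(-7 / 2)   # -> -3 (truncates toward zero)
  return python_floor, c_truncation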
def _mul_dispatch(x, y, name=None):
"""Dispatches cwise mul for "Dense*Dense" and "Dense*Sparse"."""
is_tensor_y = isinstance(y, ops.Tensor)
if is_tensor_y:
return gen_math_ops.mul(x, y, name=name)
else:
assert isinstance(y, ops.SparseTensor) # Case: Dense * Sparse.
new_vals = gen_sparse_ops.sparse_dense_cwise_mul(y.indices, y.values,
y.shape, x, name)
return ops.SparseTensor(y.indices, new_vals, y.shape)
_OverrideBinaryOperatorHelper(gen_sparse_ops.sparse_dense_cwise_div, "div",
ops.SparseTensor)
_OverrideBinaryOperatorHelper(_sparse_dense_truediv, "truediv",
ops.SparseTensor)
_OverrideBinaryOperatorHelper(gen_sparse_ops.sparse_dense_cwise_mul, "mul",
ops.SparseTensor)
_OverrideBinaryOperatorHelper(gen_math_ops.add, "add")
_OverrideBinaryOperatorHelper(gen_math_ops.sub, "sub")
_OverrideBinaryOperatorHelper(_mul_dispatch, "mul")
_OverrideBinaryOperatorHelper(gen_math_ops.div, "div")
_OverrideBinaryOperatorHelper(truediv, "truediv")
_OverrideBinaryOperatorHelper(floordiv, "floordiv")
_OverrideBinaryOperatorHelper(gen_math_ops.mod, "mod")
_OverrideBinaryOperatorHelper(pow, "pow")
def logical_xor(x, y, name="LogicalXor"):
"""x ^ y = (x | y) & ~(x & y)."""
# TODO(alemi) Make this a cwise op if people end up relying on it.
return gen_math_ops.logical_and(
gen_math_ops.logical_or(x, y),
gen_math_ops.logical_not(gen_math_ops.logical_and(x, y)),
name=name)
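# A truth-table check, in plain Python booleans, of the identity used
# above: x ^ y == (x | y) & ~(x & y).
def _logical_xor_sketch():
  for x in (False, True):
    for y in (False, True):
      assert ((x or y) and not (x and y)) == (x != y)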
_OverrideBinaryOperatorHelper(gen_math_ops.logical_and, "and")
_OverrideBinaryOperatorHelper(gen_math_ops.logical_or, "or")
_OverrideBinaryOperatorHelper(logical_xor, "xor")
ops.Tensor._override_operator("__lt__", gen_math_ops.less)
ops.Tensor._override_operator("__le__", gen_math_ops.less_equal)
ops.Tensor._override_operator("__gt__", gen_math_ops.greater)
ops.Tensor._override_operator("__ge__", gen_math_ops.greater_equal)
def range(start, limit=None, delta=1, name="range"):
"""Creates a sequence of integers.
Creates a sequence of integers that begins at `start` and extends by
increments of `delta` up to but not including `limit`.
Like the Python builtin `range`, `start` defaults to 0, so that
`range(n) = range(0, n)`.
For example:
```
# 'start' is 3
# 'limit' is 18
# 'delta' is 3
tf.range(start, limit, delta) ==> [3, 6, 9, 12, 15]
# 'limit' is 5
tf.range(limit) ==> [0, 1, 2, 3, 4]
```
Args:
start: A 0-D (scalar) of type `int32`. First entry in sequence.
Defaults to 0.
limit: A 0-D (scalar) of type `int32`. Upper limit of sequence,
exclusive.
delta: A 0-D `Tensor` (scalar) of type `int32`. Optional. Default is 1.
Number that increments `start`.
name: A name for the operation (optional).
Returns:
An 1-D `int32` `Tensor`.
"""
if limit is None:
start, limit = 0, start
return gen_math_ops._range(start, limit, delta, name=name)
@ops.RegisterShape("Range")
def _RangeShape(op):
start_value = tensor_util.constant_value(op.inputs[0])
limit_value = tensor_util.constant_value(op.inputs[1])
delta_value = tensor_util.constant_value(op.inputs[2])
if start_value is None or limit_value is None or delta_value is None:
return [tensor_shape.vector(None)]
else:
return [tensor_shape.vector((limit_value - start_value + delta_value - 1) //
delta_value)]
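# The vector length computed above is a ceiling division. For the
# docstring example start=3, limit=18, delta=3:
# (18 - 3 + 3 - 1) // 3 == 5, matching [3, 6, 9, 12, 15].
def _range_shape_sketch():
  start, limit, delta = 3, 18, 3
  return (limit - start + delta - 1) // delta   # -> 5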
# Reduction operations
def _ReductionDims(x, reduction_indices):
"""Returns range(0, rank(x)) if reduction_indices is None."""
if reduction_indices is not None:
return reduction_indices
else:
# TODO(zongheng): remove this once rank() supports SparseTensor.
if isinstance(x, ops.SparseTensor):
return range(0, array_ops.size(x.shape))
return range(0, array_ops.rank(x))
def reduce_sum(input_tensor, reduction_indices=None, keep_dims=False,
name=None):
"""Computes the sum of elements across dimensions of a tensor.
Reduces `input_tensor` along the dimensions given in `reduction_indices`.
Unless `keep_dims` is true, the rank of the tensor is reduced by 1 for each
entry in `reduction_indices`. If `keep_dims` is true, the reduced dimensions
are retained with length 1.
If `reduction_indices` has no entries, all dimensions are reduced, and a
tensor with a single element is returned.
For example:
```python
# 'x' is [[1, 1, 1]
# [1, 1, 1]]
tf.reduce_sum(x) ==> 6
tf.reduce_sum(x, 0) ==> [2, 2, 2]
tf.reduce_sum(x, 1) ==> [3, 3]
tf.reduce_sum(x, 1, keep_dims=True) ==> [[3], [3]]
tf.reduce_sum(x, [0, 1]) ==> 6
```
Args:
input_tensor: The tensor to reduce. Should have numeric type.
reduction_indices: The dimensions to reduce. If `None` (the default),
reduces all dimensions.
keep_dims: If true, retains reduced dimensions with length 1.
name: A name for the operation (optional).
Returns:
The reduced tensor.
"""
return gen_math_ops._sum(input_tensor, _ReductionDims(input_tensor,
reduction_indices),
keep_dims, name=name)
def reduce_mean(input_tensor, reduction_indices=None, keep_dims=False,
name=None):
"""Computes the mean of elements across dimensions of a tensor.
Reduces `input_tensor` along the dimensions given in `reduction_indices`.
Unless `keep_dims` is true, the rank of the tensor is reduced by 1 for each
entry in `reduction_indices`. If `keep_dims` is true, the reduced dimensions
are retained with length 1.
If `reduction_indices` has no entries, all dimensions are reduced, and a
tensor with a single element is returned.
For example:
```python
# 'x' is [[1., 1.]
# [2., 2.]]
tf.reduce_mean(x) ==> 1.5
tf.reduce_mean(x, 0) ==> [1.5, 1.5]
tf.reduce_mean(x, 1) ==> [1., 2.]
```
Args:
input_tensor: The tensor to reduce. Should have numeric type.
reduction_indices: The dimensions to reduce. If `None` (the default),
reduces all dimensions.
keep_dims: If true, retains reduced dimensions with length 1.
name: A name for the operation (optional).
Returns:
The reduced tensor.
"""
return gen_math_ops._mean(input_tensor, _ReductionDims(input_tensor,
reduction_indices),
keep_dims, name=name)
def reduce_prod(input_tensor, reduction_indices=None, keep_dims=False,
name=None):
"""Computes the product of elements across dimensions of a tensor.
Reduces `input_tensor` along the dimensions given in `reduction_indices`.
Unless `keep_dims` is true, the rank of the tensor is reduced by 1 for each
entry in `reduction_indices`. If `keep_dims` is true, the reduced dimensions
are retained with length 1.
If `reduction_indices` has no entries, all dimensions are reduced, and a
tensor with a single element is returned.
Args:
input_tensor: The tensor to reduce. Should have numeric type.
reduction_indices: The dimensions to reduce. If `None` (the default),
reduces all dimensions.
keep_dims: If true, retains reduced dimensions with length 1.
name: A name for the operation (optional).
Returns:
The reduced tensor.
"""
return gen_math_ops._prod(input_tensor, _ReductionDims(input_tensor,
reduction_indices),
keep_dims, name=name)
def reduce_min(input_tensor, reduction_indices=None, keep_dims=False,
name=None):
"""Computes the minimum of elements across dimensions of a tensor.
Reduces `input_tensor` along the dimensions given in `reduction_indices`.
Unless `keep_dims` is true, the rank of the tensor is reduced by 1 for each
entry in `reduction_indices`. If `keep_dims` is true, the reduced dimensions
are retained with length 1.
If `reduction_indices` has no entries, all dimensions are reduced, and a
tensor with a single element is returned.
Args:
input_tensor: The tensor to reduce. Should have numeric type.
reduction_indices: The dimensions to reduce. If `None` (the default),
reduces all dimensions.
keep_dims: If true, retains reduced dimensions with length 1.
name: A name for the operation (optional).
Returns:
The reduced tensor.
"""
return gen_math_ops._min(input_tensor, _ReductionDims(input_tensor,
reduction_indices),
keep_dims, name=name)
def reduce_max(input_tensor, reduction_indices=None, keep_dims=False,
name=None):
"""Computes the maximum of elements across dimensions of a tensor.
Reduces `input_tensor` along the dimensions given in `reduction_indices`.
Unless `keep_dims` is true, the rank of the tensor is reduced by 1 for each
entry in `reduction_indices`. If `keep_dims` is true, the reduced dimensions
are retained with length 1.
If `reduction_indices` has no entries, all dimensions are reduced, and a
tensor with a single element is returned.
Args:
input_tensor: The tensor to reduce. Should have numeric type.
reduction_indices: The dimensions to reduce. If `None` (the default),
reduces all dimensions.
keep_dims: If true, retains reduced dimensions with length 1.
name: A name for the operation (optional).
Returns:
The reduced tensor.
"""
return gen_math_ops._max(input_tensor, _ReductionDims(input_tensor,
reduction_indices),
keep_dims, name=name)
def reduce_all(input_tensor, reduction_indices=None, keep_dims=False,
name=None):
"""Computes the "logical and" of elements across dimensions of a tensor.
Reduces `input_tensor` along the dimensions given in `reduction_indices`.
Unless `keep_dims` is true, the rank of the tensor is reduced by 1 for each
entry in `reduction_indices`. If `keep_dims` is true, the reduced dimensions
are retained with length 1.
If `reduction_indices` has no entries, all dimensions are reduced, and a
tensor with a single element is returned.
For example:
```python
# 'x' is [[True, True]
# [False, False]]
tf.reduce_all(x) ==> False
tf.reduce_all(x, 0) ==> [False, False]
tf.reduce_all(x, 1) ==> [True, False]
```
Args:
input_tensor: The boolean tensor to reduce.
reduction_indices: The dimensions to reduce. If `None` (the default),
reduces all dimensions.
keep_dims: If true, retains reduced dimensions with length 1.
name: A name for the operation (optional).
Returns:
The reduced tensor.
"""
return gen_math_ops._all(input_tensor, _ReductionDims(input_tensor,
reduction_indices),
keep_dims, name=name)
def reduce_any(input_tensor, reduction_indices=None, keep_dims=False,
name=None):
"""Computes the "logical or" of elements across dimensions of a tensor.
Reduces `input_tensor` along the dimensions given in `reduction_indices`.
Unless `keep_dims` is true, the rank of the tensor is reduced by 1 for each
entry in `reduction_indices`. If `keep_dims` is true, the reduced dimensions
are retained with length 1.
If `reduction_indices` has no entries, all dimensions are reduced, and a
tensor with a single element is returned.
For example:
```python
# 'x' is [[True, True]
# [False, False]]
tf.reduce_any(x) ==> True
tf.reduce_any(x, 0) ==> [True, True]
tf.reduce_any(x, 1) ==> [True, False]
```
Args:
input_tensor: The boolean tensor to reduce.
reduction_indices: The dimensions to reduce. If `None` (the default),
reduces all dimensions.
keep_dims: If true, retains reduced dimensions with length 1.
name: A name for the operation (optional).
Returns:
The reduced tensor.
"""
return gen_math_ops._any(input_tensor, _ReductionDims(input_tensor,
reduction_indices),
keep_dims, name=name)
def trace(x, name=None):
""" Compute the trace of a tensor `x`.
`trace(x)` returns the sum of along the diagonal.
For example:
```python
# 'x' is [[1, 1],
# [1, 1]]
tf.trace(x) ==> 2
# 'x' is [[1,2,3],
# [4,5,6],
# [7,8,9]]
tf.trace(x) ==> 15
```
Args:
x: 2-D tensor.
name: A name for the operation (optional).
Returns:
The trace of input tensor.
"""
with ops.op_scope([x], name, "Trace") as name:
x = ops.convert_to_tensor(x, name="x")
if len(x.get_shape()) != 2:
raise ValueError("Expected a tensor with rank 2, rank %d tensor received"
% len(x.get_shape()))
return reduce_sum(array_ops.diag_part(x), name=name)
def matmul(a, b,
transpose_a=False, transpose_b=False,
a_is_sparse=False, b_is_sparse=False,
name=None):
"""Multiplies matrix `a` by matrix `b`, producing `a` * `b`.
The inputs must be two-dimensional matrices, with matching inner dimensions,
possibly after transposition.
Both matrices must be of the same type. The supported types are:
`float`, `double`, `int32`, `complex64`.
Either matrix can be transposed on the fly by setting the corresponding flag
to `True`. This is `False` by default.
If one or both of the matrices contain a lot of zeros, a more efficient
multiplication algorithm can be used by setting the corresponding
`a_is_sparse` or `b_is_sparse` flag to `True`. These are `False` by default.
For example:
```python
# 2-D tensor `a`
a = tf.constant([1, 2, 3, 4, 5, 6], shape=[2, 3]) => [[1. 2. 3.]
[4. 5. 6.]]
# 2-D tensor `b`
b = tf.constant([7, 8, 9, 10, 11, 12], shape=[3, 2]) => [[7. 8.]
[9. 10.]
[11. 12.]]
c = tf.matmul(a, b) => [[58 64]
[139 154]]
```
Args:
a: `Tensor` of type `float`, `double`, `int32` or `complex64`.
b: `Tensor` with same type as `a`.
transpose_a: If `True`, `a` is transposed before multiplication.
transpose_b: If `True`, `b` is transposed before multiplication.
a_is_sparse: If `True`, `a` is treated as a sparse matrix.
b_is_sparse: If `True`, `b` is treated as a sparse matrix.
name: Name for the operation (optional).
Returns:
A `Tensor` of the same type as `a`.
"""
with ops.op_scope([a, b], name, "MatMul") as name:
a = ops.convert_to_tensor(a, name="a")
b = ops.convert_to_tensor(b, name="b")
sparse_matmul_types = [dtypes.bfloat16, dtypes.float32]
use_sparse_matmul = (a.dtype in sparse_matmul_types and
b.dtype in sparse_matmul_types and
(a_is_sparse or b_is_sparse))
if dtypes.bfloat16 in (a.dtype, b.dtype):
# matmul currently doesn't handle bfloat16 inputs.
use_sparse_matmul = True
if use_sparse_matmul:
return sparse_matmul(a, b,
transpose_a=transpose_a,
transpose_b=transpose_b,
a_is_sparse=a_is_sparse,
b_is_sparse=b_is_sparse,
name=name)
else:
return gen_math_ops._mat_mul(a, b,
transpose_a=transpose_a,
transpose_b=transpose_b,
name=name)
sparse_matmul = gen_math_ops._sparse_mat_mul
batch_matmul = gen_math_ops._batch_mat_mul
ops.RegisterShape("MatMul")(common_shapes.matmul_shape)
ops.RegisterShape("SparseMatMul")(common_shapes.matmul_shape)
@ops.RegisterStatistics("MatMul", "flops")
def _calc_mat_mul_flops(graph, node):
"""Calculates the compute resources needed for MatMul."""
transpose_a = node.attr["transpose_a"].b
a_shape = graph_util.tensor_shape_from_node_def_name(graph, node.input[0])
a_shape.assert_is_fully_defined()
if transpose_a:
k = int(a_shape[0])
else:
k = int(a_shape[1])
output_shape = graph_util.tensor_shape_from_node_def_name(graph, node.name)
output_shape.assert_is_fully_defined()
output_count = np.prod(output_shape.as_list())
return ops.OpStats("flops", (k * output_count * 2))
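# A sanity check of the FLOPs formula with hypothetical shapes: for a
# [2, 3] x [3, 2] matmul, k == 3, the output has 4 entries, and multiplies
# and adds are counted separately.
def _mat_mul_flops_sketch():
  k, output_count = 3, 4
  return k * output_count * 2   # -> 24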
@ops.RegisterStatistics("MatMul", "weight_parameters")
def _calc_mat_mul_weight_parameters(graph, node):
"""Calculates the on-disk size of the weights for MatMul."""
# We assume here that the weights are always in the second input to the op,
# which is generally true by convention for fully-connected layers, but not
# enforced or checked.
weights_shape = graph_util.tensor_shape_from_node_def_name(graph,
node.input[1])
weights_shape.assert_is_fully_defined()
return ops.OpStats("weight_parameters",
(int(weights_shape[1]) * int(weights_shape[0])))
def _as_indexed_slices(x):
"""Convert 'x' to IndexedSlices.
Convert a dense Tensor to a block-sparse IndexedSlices.
Args:
x: Either a Tensor object, or an IndexedSlices object.
Returns:
An IndexedSlices object.
Raises:
TypeError: If 'x' is not a Tensor or an IndexedSlices object.
"""
# TODO(touts): op_scope
if not isinstance(x, (ops.Tensor, ops.IndexedSlices)):
raise TypeError("Not a Tensor or IndexedSlices: %s" % type(x))
if isinstance(x, ops.IndexedSlices):
return x
x_shape = array_ops.shape(x)
return ops.IndexedSlices(x, range(0, x_shape[0]), x_shape)
def _as_indexed_slices_list(inputs):
"""Convert all elements of 'inputs' to IndexedSlices.
Additionally, homogenize the types of all the indices to
either int32 or int64.
Args:
inputs: List containing either Tensor or IndexedSlices objects.
Returns:
A list of IndexedSlices objects.
Raises:
TypeError: If 'inputs' is not a list or a tuple.
"""
if not isinstance(inputs, (list, tuple)):
raise TypeError("Expected a list or tuple, not a %s" % type(inputs))
outputs = [_as_indexed_slices(i) for i in inputs]
with_int32_index = [o.indices for o in outputs
if o.indices.dtype == dtypes.int32]
if not with_int32_index or len(with_int32_index) == len(outputs):
return outputs
casted_outputs = []
for o in outputs:
if o.indices.dtype == dtypes.int32:
casted_outputs.append(
ops.IndexedSlices(o.values, cast(o.indices, dtypes.int64),
o.dense_shape))
else:
casted_outputs.append(o)
return casted_outputs
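# An illustrative NumPy analogue (not TensorFlow code) of the index
# homogenization rule above: when int32 and int64 indices are mixed,
# everything is lifted to int64.
def _homogenize_indices_sketch():
  index_arrays = [np.array([0, 2], np.int32), np.array([1], np.int64)]
  if len({a.dtype for a in index_arrays}) > 1:
    index_arrays = [a.astype(np.int64) for a in index_arrays]
  return index_arrays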
def accumulate_n(inputs, shape=None, tensor_dtype=None, name=None):
"""Returns the element-wise sum of a list of tensors.
Optionally, pass `shape` and `tensor_dtype` for shape and type checking,
otherwise, these are inferred.
For example:
```python
# tensor 'a' is [[1, 2], [3, 4]]
# tensor `b` is [[5, 0], [0, 6]]
tf.accumulate_n([a, b, a]) ==> [[7, 4], [6, 14]]
# Explicitly pass shape and type
tf.accumulate_n([a, b, a], shape=[2, 2], tensor_dtype=tf.int32)
==> [[7, 4], [6, 14]]
```
Args:
inputs: A list of `Tensor` objects, each with same shape and type.
shape: Shape of elements of `inputs`.
tensor_dtype: The type of `inputs`.
name: A name for the operation (optional).
Returns:
A `Tensor` of same shape and type as the elements of `inputs`.
Raises:
ValueError: If `inputs` don't all have same shape and dtype or the shape
cannot be inferred.
"""
if tensor_dtype is None:
if not inputs or not isinstance(inputs, (list, tuple)):
raise ValueError("inputs must be a list of at least one Tensor with the "
"same dtype and shape")
inputs = ops.convert_n_to_tensor_or_indexed_slices(inputs)
if not all(isinstance(x, ops.Tensor) for x in inputs):
raise ValueError("inputs must be a list of at least one Tensor with the "
"same dtype and shape")
if not all(x.dtype == inputs[0].dtype for x in inputs):
raise ValueError("inputs must be a list of at least one Tensor with the "
"same dtype and shape")
tensor_dtype = inputs[0].dtype
if shape is not None:
shape = tensor_shape.as_shape(shape)
else:
shape = tensor_shape.unknown_shape()
for input_tensor in inputs:
if isinstance(input_tensor, ops.Tensor):
shape = shape.merge_with(input_tensor.get_shape())
if not shape.is_fully_defined():
# TODO(pbar): Make a version of assign_add that accepts an uninitialized
# lvalue, and takes its shape from that? This would allow accumulate_n to
    # work in all situations in which add_n currently works.
raise ValueError("Cannot infer the shape of the accumulator for "
"accumulate_n. Pass the shape argument, or set the shape "
"of at least one of the inputs.")
with ops.op_scope(inputs, name, "AccumulateN") as name:
if len(inputs) == 1:
return inputs[0]
var = gen_state_ops._temporary_variable(shape=shape, dtype=tensor_dtype)
var_name = var.op.name
var = state_ops.assign(var, array_ops.zeros_like(inputs[0]))
update_ops = []
for input_tensor in inputs:
op = state_ops.assign_add(var, input_tensor, use_locking=True)
update_ops.append(op)
with ops.control_dependencies(update_ops):
return gen_state_ops._destroy_temporary_variable(var,
var_name=var_name,
name=name)
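# An illustrative NumPy analogue (not TensorFlow code) of the
# accumulate-in-place strategy above: allocate one zeroed buffer, then add
# every input into it, mirroring the temporary variable plus assign_add.
def _accumulate_n_sketch():
  inputs = [np.array([[1, 2], [3, 4]]), np.array([[5, 0], [0, 6]])]
  acc = np.zeros_like(inputs[0])
  for t in inputs:
    acc += t
  return acc   # -> [[6, 2], [3, 10]]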
@ops.RegisterShape("BatchMatMul")
def _BatchMatMulShape(op):
"""Shape function for BatchMatMul op."""
a_shape = op.inputs[0].get_shape()
adj_a = op.get_attr("adj_x")
b_shape = op.inputs[1].get_shape()
adj_b = op.get_attr("adj_y")
if a_shape.dims is None and b_shape.dims is None:
return [tensor_shape.unknown_shape()]
batch_dims = a_shape[:-2].merge_with(b_shape[:-2])
output_rows = a_shape[-1] if adj_a else a_shape[-2]
output_cols = b_shape[-2] if adj_b else b_shape[-1]
inner_a = a_shape[-2] if adj_a else a_shape[-1]
inner_b = b_shape[-1] if adj_b else b_shape[-2]
inner_a.assert_is_compatible_with(inner_b)
return [batch_dims.concatenate([output_rows, output_cols])]
def sigmoid(x, name=None):
"""Computes sigmoid of `x` element-wise.
Specifically, `y = 1 / (1 + exp(-x))`.
Args:
x: A Tensor with type `float`, `double`, `int32`, `complex64`, `int64`,
or `qint32`.
name: A name for the operation (optional).
Returns:
A Tensor with the same type as `x` if `x.dtype != qint32`
otherwise the return type is `quint8`.
"""
with ops.op_scope([x], name, "Sigmoid") as name:
x = ops.convert_to_tensor(x, name="x")
return gen_math_ops._sigmoid(x, name=name)
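# A quick numeric check of y = 1 / (1 + exp(-x)) at a few sample points,
# using the module-level numpy import.
def _sigmoid_sketch():
  # -> approximately [0.1192, 0.5, 0.8808]
  return [1.0 / (1.0 + np.exp(-v)) for v in (-2.0, 0.0, 2.0)]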
def tanh(x, name=None):
"""Computes hyperbolic tangent of `x` element-wise.
Args:
x: A Tensor with type `float`, `double`, `int32`, `complex64`, `int64`,
or `qint32`.
name: A name for the operation (optional).
Returns:
A Tensor with the same type as `x` if `x.dtype != qint32` otherwise
the return type is `quint8`.
"""
with ops.op_scope([x], name, "Tanh") as name:
x = ops.convert_to_tensor(x, name="x")
return gen_math_ops._tanh(x, name=name)
ops.RegisterShape("Abs")(common_shapes.unchanged_shape)
ops.RegisterShape("Ceil")(common_shapes.unchanged_shape)
ops.RegisterShape("Conj")(common_shapes.unchanged_shape)
ops.RegisterShape("Cos")(common_shapes.unchanged_shape)
ops.RegisterShape("Cross")(common_shapes.unchanged_shape)
ops.RegisterShape("Exp")(common_shapes.unchanged_shape)
ops.RegisterShape("Floor")(common_shapes.unchanged_shape)
ops.RegisterShape("Imag")(common_shapes.unchanged_shape)
ops.RegisterShape("Inv")(common_shapes.unchanged_shape)
ops.RegisterShape("IsFinite")(common_shapes.unchanged_shape)
ops.RegisterShape("IsInf")(common_shapes.unchanged_shape)
ops.RegisterShape("IsNan")(common_shapes.unchanged_shape)
ops.RegisterShape("Log")(common_shapes.unchanged_shape)
ops.RegisterShape("LogicalNot")(common_shapes.unchanged_shape)
ops.RegisterShape("Neg")(common_shapes.unchanged_shape)
ops.RegisterShape("Real")(common_shapes.unchanged_shape)
ops.RegisterShape("Rsqrt")(common_shapes.unchanged_shape)
ops.RegisterShape("Sign")(common_shapes.unchanged_shape)
ops.RegisterShape("Sin")(common_shapes.unchanged_shape)
ops.RegisterShape("Sqrt")(common_shapes.unchanged_shape)
ops.RegisterShape("Square")(common_shapes.unchanged_shape)
ops.RegisterShape("Sigmoid")(common_shapes.unchanged_shape)
ops.RegisterShape("Tanh")(common_shapes.unchanged_shape)
ops.RegisterShape("Lgamma")(common_shapes.unchanged_shape)
ops.RegisterShape("Digamma")(common_shapes.unchanged_shape)
ops.RegisterShape("Erf")(common_shapes.unchanged_shape)
ops.RegisterShape("Erfc")(common_shapes.unchanged_shape)
ops.RegisterShape("Cast")(common_shapes.unchanged_shape)
ops.RegisterShape("ComplexAbs")(common_shapes.unchanged_shape)
ops.RegisterShape("FFT")(common_shapes.unchanged_shape)
ops.RegisterShape("IFFT")(common_shapes.unchanged_shape)
ops.RegisterShape("FFT2D")(common_shapes.unchanged_shape)
ops.RegisterShape("IFFT2D")(common_shapes.unchanged_shape)
ops.RegisterShape("FFT3D")(common_shapes.unchanged_shape)
ops.RegisterShape("IFFT3D")(common_shapes.unchanged_shape)
ops.RegisterShape("BatchFFT")(common_shapes.unchanged_shape)
ops.RegisterShape("BatchIFFT")(common_shapes.unchanged_shape)
ops.RegisterShape("BatchFFT2D")(common_shapes.unchanged_shape)
ops.RegisterShape("BatchIFFT2D")(common_shapes.unchanged_shape)
ops.RegisterShape("BatchFFT3D")(common_shapes.unchanged_shape)
ops.RegisterShape("BatchIFFT3D")(common_shapes.unchanged_shape)
@ops.RegisterShape("Add")
@ops.RegisterShape("Complex")
@ops.RegisterShape("Div")
@ops.RegisterShape("Equal")
@ops.RegisterShape("Greater")
@ops.RegisterShape("GreaterEqual")
@ops.RegisterShape("Igamma")
@ops.RegisterShape("Igammac")
@ops.RegisterShape("Zeta")
@ops.RegisterShape("Polygamma")
@ops.RegisterShape("Less")
@ops.RegisterShape("LessEqual")
@ops.RegisterShape("LogicalAnd")
@ops.RegisterShape("LogicalOr")
@ops.RegisterShape("Maximum")
@ops.RegisterShape("Minimum")
@ops.RegisterShape("Mod")
@ops.RegisterShape("Mul")
@ops.RegisterShape("NotEqual")
@ops.RegisterShape("Pow")
@ops.RegisterShape("Sub")
@ops.RegisterShape("SquaredDifference")
def _BroadcastShape(op):
"""Common shape function for binary operators that broadcast their inputs."""
shape_x = op.inputs[0].get_shape()
shape_y = op.inputs[1].get_shape()
if shape_x.ndims is None or shape_y.ndims is None:
return [tensor_shape.unknown_shape()]
# To compute the broadcasted dimensions, we zip together shape_x and shape_y,
# and pad with 1 to make them the same length.
broadcasted_dims = reversed(list(six.moves.zip_longest(
reversed(shape_x.dims),
reversed(shape_y.dims),
fillvalue=tensor_shape.Dimension(1))))
# Next we combine the dimensions according to the numpy broadcasting rules.
# http://docs.scipy.org/doc/numpy/user/basics.broadcasting.html
return_dims = []
for (dim_x, dim_y) in broadcasted_dims:
if dim_x.value is None or dim_y.value is None:
# One or both dimensions is unknown. If either dimension is greater than
# 1, we assume that the program is correct, and the other dimension will
# be broadcast to match it.
# TODO(mrry): If we eliminate the shape checks in C++, we must still
# assert that the unknown dim is either 1 or the same as the known dim.
if dim_x.value is not None and dim_x.value > 1:
return_dims.append(dim_x)
elif dim_y.value is not None and dim_y.value > 1:
return_dims.append(dim_y)
else:
return_dims.append(None)
elif dim_x.value == 1:
# We will broadcast dim_x to dim_y.
return_dims.append(dim_y)
elif dim_y.value == 1:
# We will broadcast dim_y to dim_x.
return_dims.append(dim_x)
elif dim_x.value == dim_y.value:
# The dimensions are compatible, so output is the same size in that
# dimension.
return_dims.append(dim_x.merge_with(dim_y))
else:
raise ValueError("Incompatible shapes for broadcasting: %s and %s"
% (shape_x, shape_y))
return [tensor_shape.TensorShape(return_dims)]
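# Illustrative sketch (hypothetical shapes, not taken from any registered op):
# broadcasting pads the shorter shape with leading 1s and merges dimensionwise,
# following the numpy rules linked above:
#   shape_x = (5, 1, 3), shape_y = (4, 3)
#   padded  = (5, 1, 3) vs (1, 4, 3)  ->  result (5, 4, 3)
# so _BroadcastShape would return [TensorShape([5, 4, 3])] for such inputs.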
@ops.RegisterShape("SparseDenseCwiseMul")
@ops.RegisterShape("SparseDenseCwiseDiv")
@ops.RegisterShape("SparseDenseCwiseAdd")
def _SparseDenseBinaryOpShape(op): # pylint: disable=invalid-name
"""Common shape for 'sparse <binary cwise op> dense -> sparse' operators."""
nnz = op.inputs[1].get_shape()[0]
return [tensor_shape.TensorShape(nnz)]
@ops.RegisterShape("AddN")
def _AddNShape(op):
merged_shape = tensor_shape.unknown_shape()
for input_ in op.inputs:
merged_shape = merged_shape.merge_with(input_.get_shape())
return [merged_shape]
@ops.RegisterShape("Select")
def _SelectShape(op):
"""Shape function for SelectOp."""
# The inputs 'then' and 'else' must have the same shape.
# The input 'cond' must either have the same shape as 'then' and
# 'else', or be a vector if 'then' and 'else' are at least vectors.
c_shape = op.inputs[0].get_shape()
t_shape = op.inputs[1].get_shape()
e_shape = op.inputs[2].get_shape()
t_e_shape = t_shape.merge_with(e_shape)
c_shape_list = c_shape.as_list() if c_shape.ndims is not None else None
t_e_shape_list = t_e_shape.as_list() if t_e_shape.ndims is not None else None
if c_shape_list is not None and t_e_shape_list is not None:
if len(c_shape_list) != 1:
# If the rank of 'cond' is != 1, the shape must match 'then' and 'else'
t_e_shape = t_e_shape.merge_with(c_shape)
if t_e_shape_list:
# If then and else are not scalars, then cond must be at least
# a vector, and its first value must match that of 'else'
c_shape = c_shape.with_rank_at_least(1)
if len(c_shape.as_list()) == 1:
c_shape.merge_with(tensor_shape.vector(t_e_shape_list[0]))
return [t_e_shape]
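# Example (illustrative): a cond of shape (4,) selecting between 'then'/'else'
# of shape (4, 2) is valid -- the vector cond is matched against the first
# dimension -- and the output shape is (4, 2).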
@ops.RegisterShape("ArgMax")
@ops.RegisterShape("ArgMin")
def _ArgOpShape(op):
"""Common shape function for arg-reduction ops."""
dimension_shape = op.inputs[1].get_shape()
dimension_shape.assert_is_compatible_with(tensor_shape.scalar())
input_shape = op.inputs[0].get_shape()
if input_shape.ndims is None:
return [tensor_shape.unknown_shape()]
elif input_shape.ndims <= 1:
return [tensor_shape.scalar()]
dimension = tensor_util.constant_value(op.inputs[1])
if dimension is None:
return [tensor_shape.unknown_shape(ndims=input_shape.ndims - 1)]
  elif 0 <= dimension < input_shape.ndims:
returned_shape = []
for i, dim in enumerate(input_shape.dims):
if i != dimension:
returned_shape.append(dim)
return [tensor_shape.TensorShape(returned_shape)]
else:
raise ValueError(
"dimension (%d) must be in the range [0, %d), where %d is the number "
"of dimensions in the input"
% (dimension, input_shape.ndims, input_shape.ndims))
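# Example (illustrative input only): for an input of shape (2, 3, 5) with
# dimension=1, the reduced axis is dropped and the result shape is (2, 5).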
@ops.RegisterShape("All")
@ops.RegisterShape("Any")
@ops.RegisterShape("Max")
@ops.RegisterShape("Mean")
@ops.RegisterShape("Min")
@ops.RegisterShape("Prod")
@ops.RegisterShape("Sum")
def _ReductionShape(op):
"""Common shape function for reduction ops."""
input_shape = op.inputs[0].get_shape()
reduction_indices = tensor_util.constant_value(op.inputs[1])
keep_dims = op.get_attr("keep_dims")
if reduction_indices is None or input_shape.ndims is None:
if keep_dims:
return [tensor_shape.unknown_shape(ndims=input_shape.ndims)]
else:
return [tensor_shape.unknown_shape()]
# Turn reduction_indices from scalar to vector if necessary
reduction_indices = np.ravel(reduction_indices)
for reduction_index in reduction_indices:
if (reduction_index < -input_shape.ndims or
reduction_index >= input_shape.ndims):
raise ValueError("Invalid reduction dimension %d for input with %d "
"dimensions" % (reduction_index, input_shape.ndims))
reduction_indices = set([(x + input_shape.ndims) % input_shape.ndims
for x in reduction_indices])
returned_dims = []
if keep_dims:
for i, dim in enumerate(input_shape.dims):
if i in reduction_indices:
returned_dims.append(1)
else:
returned_dims.append(dim)
else:
for i, dim in enumerate(input_shape.dims):
if i not in reduction_indices:
returned_dims.append(dim)
return [tensor_shape.TensorShape(returned_dims)]
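# Example (illustrative input only): an input of shape (2, 3, 5) reduced over
# reduction_indices=[1] yields shape (2, 5), or (2, 1, 5) if keep_dims is set.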
@ops.RegisterShape("SegmentMax")
@ops.RegisterShape("SegmentMean")
@ops.RegisterShape("SegmentMin")
@ops.RegisterShape("SegmentProd")
@ops.RegisterShape("SegmentSum")
def _SegmentReductionShape(op):
"""Common shape function for segment reduction ops."""
data_shape = op.inputs[0].get_shape()
segment_ids_shape = op.inputs[1].get_shape()
segment_ids_shape.assert_has_rank(1)
return [tensor_shape.TensorShape([None]).concatenate(data_shape[1:])]
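# Example (illustrative): data of shape (6, 3) with rank-1 segment_ids gives
# output shape (?, 3); the number of segments is not known statically.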
@ops.RegisterShape("SparseSegmentMean")
@ops.RegisterShape("SparseSegmentSqrtN")
@ops.RegisterShape("SparseSegmentSum")
def _SparseSegmentReductionShape(op):
"""Common shape function for sparse segment reduction ops."""
data_shape = op.inputs[0].get_shape()
indices_shape = op.inputs[1].get_shape()
indices_shape.assert_has_rank(1)
segment_ids_shape = op.inputs[2].get_shape()
segment_ids_shape.assert_has_rank(1)
indices_shape.assert_is_compatible_with(segment_ids_shape)
return [tensor_shape.TensorShape([None]).concatenate(data_shape[1:])]
@ops.RegisterShape("SparseSegmentMeanGrad")
@ops.RegisterShape("SparseSegmentSqrtNGrad")
# pylint: disable=invalid-name
def _SparseSegmentReductionGradShape(op):
"""Shape function for the SparseSegment[Mean|SqrtN]Grad ops."""
input_shape = op.inputs[0].get_shape()
indices_shape = op.inputs[1].get_shape().with_rank(1)
unused_segment_ids_shape = op.inputs[2].get_shape().merge_with(indices_shape)
unused_output_dim0_shape = op.inputs[3].get_shape().merge_with(
tensor_shape.scalar())
dim0 = tensor_util.constant_value(op.inputs[3])
return [tensor_shape.TensorShape([dim0]).concatenate(input_shape[1:])]
# pylint: enable=invalid-name
@ops.RegisterShape("UnsortedSegmentSum")
def _UnsortedSegmentSumShape(op):
"""Shape function for UnsortedSegmentSum."""
data_shape = op.inputs[0].get_shape()
segment_ids_shape = op.inputs[1].get_shape()
mid = segment_ids_shape.ndims
if mid is None:
return [tensor_shape.unknown_shape()]
else:
num_segments = tensor_util.constant_value(op.inputs[2])
return [tensor_shape.TensorShape([num_segments]).concatenate(
data_shape[mid:])]
@ops.RegisterShape("LinSpace")
def _LinspaceShape(op):
num = tensor_util.constant_value(op.inputs[2])
return [tensor_shape.vector(num)]
def reduced_shape(input_shape, axes):
"""Helper function for reduction ops.
Args:
input_shape: 1-D Tensor, the shape of the Tensor being reduced.
axes: 1-D Tensor, the reduction axes.
Returns:
A 1-D Tensor, the output shape as if keep_dims were set to True.
"""
# Example:
# cast needed for SparseTensor reductions
input_shape = to_int32(input_shape) # [2, 3, 5, 7]
axes = to_int32(axes) # [1, 2]
input_rank = array_ops.size(input_shape) # 4
axes = (axes + input_rank) % input_rank
axes_shape = array_ops.shape(axes) # [2]
return gen_data_flow_ops.dynamic_stitch( # [2, 1, 1, 7]
[range(input_rank), # [0, 1, 2, 3]
axes], # [1, 2]
[input_shape, # [2, 3, 5, 7]
array_ops.fill(axes_shape, 1)]) # [1, 1]<|fim▁end|> | |
<|file_name|>urls.py<|end_file_name|><|fim▁begin|>from __future__ import absolute_import
from django.conf.urls import patterns, include
from . import views, customadmin, admin
urlpatterns = patterns('',<|fim▁hole|> (r'^test_admin/admin2/', include(customadmin.site.urls)),
(r'^test_admin/admin3/', include(admin.site.urls), dict(form_url='pony')),
(r'^test_admin/admin4/', include(customadmin.simple_site.urls)),
(r'^test_admin/admin5/', include(admin.site2.urls)),
)<|fim▁end|> | (r'^test_admin/admin/doc/', include('django.contrib.admindocs.urls')),
(r'^test_admin/admin/secure-view/$', views.secure_view),
(r'^test_admin/admin/', include(admin.site.urls)), |
<|file_name|>Table.d.ts<|end_file_name|><|fim▁begin|>import * as React from 'react';
import { StandardProps } from '..';
export interface TableProps extends StandardProps<TableBaseProps, TableClassKey> {
component?: React.ReactType<TableBaseProps>;
}
export type TableBaseProps = React.TableHTMLAttributes<HTMLTableElement>;<|fim▁hole|>
declare const Table: React.ComponentType<TableProps>;
export default Table;<|fim▁end|> |
export type TableClassKey = 'root'; |
<|file_name|>rpc.py<|end_file_name|><|fim▁begin|># Copyright 2015 StackHut Ltd.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""
StackHut interface and modifications to Barrister RPC library
"""
import os
import json
import uuid
import signal
from enum import Enum
import sh
from ..barrister import err_response, ERR_PARSE, ERR_INVALID_REQ, ERR_METHOD_NOT_FOUND, \
ERR_INVALID_PARAMS, ERR_INTERNAL, ERR_UNKNOWN, ERR_INVALID_RESP, \
parse, contract_from_file, RpcException
from ..utils import log
CONTRACTFILE = '.api.json'
IDLFILE = 'api.idl'
REQ_FIFO = '.req.json'
RESP_FIFO = '.resp.json'
"""
High-level interface into the IDL file
- based on the JSON compiled output that is parsed into an AST
- used from runtime introspection
"""
class ContactTypes(Enum):
int = 1
string = 2
bool = 3
array = 4
obj = 5
def render_signature(func):
def render_params(p):
pp_p = "{} {}".format(p.type, p.name)
return '[]' + pp_p if p.is_array else pp_p
params_t = str.join(', ', [render_params(p) for p in func.params])
if func.returns is not None:
return "{}({}) {}".format(func.name, params_t, render_params(func.returns))
else:
return "{}({}) {}".format(func.name, params_t)
def load_contract_file():
return contract_from_file(CONTRACTFILE)
def generate_contract_file():
"""
Generate the IDL -> JSON Contract file
main interface into barrister parser
"""
if not os.path.exists(IDLFILE):
raise AssertionError("Cannot find 'api.idl' interface definition file")
with open(IDLFILE, 'r') as idl_file, open(CONTRACTFILE, "w") as contract_file:
parsed = parse(idl_file, IDLFILE)
contract_file.write(json.dumps(parsed, indent=4))
####################################################################################################
# Error handling
ERR_SERVICE = -32002
class ParseError(RpcException):
def __init__(self, data=None):
super().__init__(ERR_PARSE, 'Parse Error', data)
class InvalidReqError(RpcException):
def __init__(self, data=None):
super().__init__(ERR_INVALID_REQ, 'Invalid Request', data)
class MethodNotFoundError(RpcException):
def __init__(self, data=None):
super().__init__(ERR_METHOD_NOT_FOUND, 'Method Not Found', data)
class InternalError(RpcException):
def __init__(self, msg='', data=None):
super().__init__(ERR_INTERNAL, 'Internal Error - {}'.format(msg), data)
class ServiceError(RpcException):
def __init__(self, msg, data=None):
super().__init__(ERR_SERVICE, 'Service Error - {}'.format(msg), data)
class CustomError(RpcException):
def __init__(self, code, msg, data=None):
super().__init__(code, 'Error - {}'.format(msg), data)
class NonZeroExitError(RpcException):
def __init__(self, exit_code, stderr):
data = dict(exit_code=exit_code, stderr=stderr)
super().__init__(-32001, 'Sub-command returned a non-zero exit', data)
def exc_to_json_error(e, req_id=None):
return err_response(req_id, e.code, e.msg, e.data)
class SHCmds(Enum):
startup = 1
shutdown = 2
preBatch = 3
postBatch = 4
def add_get_id(d):
"""add id to json rpc if not present"""
if 'id' not in d:
d['id'] = str(uuid.uuid4())
return d['id']
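# Example (illustrative): add_get_id({'method': 'f'}) stores a fresh uuid4
# string under 'id' and returns it; an existing 'id' is left untouched and
# simply returned.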
class StackHutRPC:
"""
Alt. implementation of Barrister.server modified for StackHut needs
Performs
    * 'Type'-checking of requests and responses per the interface definition
    * loading the language-specific shim/client
* passing messages between the runner and shim/client process
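
    The FIFO wire format is one JSON object per request, e.g. (illustrative):
        {"method": "Default.add", "params": [1, 2], "req_id": "<uuid4>"}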
"""
def __init__(self, backend, shim_cmd):
self.contract = contract_from_file(CONTRACTFILE)
self.backend = backend
# setup fifos
os.mkfifo(REQ_FIFO)<|fim▁hole|> os.mkfifo(RESP_FIFO)
# run the shim
cmd = sh.Command(shim_cmd[0])
self.p = cmd(shim_cmd[1:], _bg=True, _out=lambda x: log.debug("Runner - {}".format(x.rstrip())),
_err=lambda x: log.error("Runner - {}".format(x.rstrip())))
def __enter__(self):
return self
def __exit__(self, exc_type, exc_val, exc_tb):
def handler(signum, frame):
log.error("Force-quitting RPC subprocess")
self.p.kill()
raise TimeoutError()
# Set the signal handler and a 5-second alarm
signal.signal(signal.SIGALRM, handler)
signal.alarm(5)
# send shutdown msg to each iface
for iface in self.contract.interfaces.keys():
log.debug("Send shutdown to {}".format(iface))
self._cmd_call('{}.{}'.format(iface, SHCmds.shutdown.name))
log.debug("Terminating RPC sub-process")
try:
self.p.terminate()
self.p.wait()
except sh.SignalException_15:
log.warn("RPC subprocess shutdown uncleanly")
pass
signal.alarm(0)
def _cmd_call(self, cmd):
log.debug('Sending cmd message - {}'.format(cmd))
resp = self._sub_call(cmd, [], 'shcmd')
log.debug("Cmd response - {}".format(resp))
def _req_call(self, req):
"""Make RPC call for a single request"""
req_id = None
try:
if type(req) is not dict:
                raise InvalidReqError(dict(msg="{} is not an object.".format(req)))
# massage the data (if needed)
req_id = add_get_id(req)
if 'jsonrpc' not in req:
req['jsonrpc'] = "2.0"
if "method" not in req:
raise InvalidReqError(dict(msg="No method"))
# return the idl - TODO - move into Scala
if req['method'] == "common.barrister-idl" or req['method'] == "getIdl":
return self.contract.idl_parsed
# add the default interface if none exists
if req['method'].find('.') < 0:
req['method'] = "{}.{}".format('Default', req['method'])
# NOTE - would setup context and run pre/post filters here in Barrister
# Ok, - we're good to go
method = req["method"]
iface_name, func_name = method.split('.')
params = req.get('params', [])
self.contract.validate_request(iface_name, func_name, params)
result = self._sub_call(method, params, req_id)
self.contract.validate_response(iface_name, func_name, result)
resp = dict(jsonrpc="2.0", id=req_id, result=result)
except RpcException as e:
resp = exc_to_json_error(e, req_id)
except Exception as e:
_e = InternalError('Exception', dict(exception=repr(e)))
resp = exc_to_json_error(_e, req_id)
return resp
def _sub_call(self, method, params, req_id):
"""Acutal call to the shim/client subprocess"""
self.backend.create_request_dir(req_id)
# create the (sub-)req
sub_req = dict(method=method, params=params, req_id=req_id)
# blocking-wait to send the request
with open(REQ_FIFO, "w") as f:
f.write(json.dumps(sub_req))
# blocking-wait to read the resp
with open(RESP_FIFO, "r") as f:
sub_resp = json.loads(f.read())
# check the response
if 'error' in sub_resp:
error_code = sub_resp['error']
log.debug(sub_resp)
if error_code == ERR_METHOD_NOT_FOUND:
raise MethodNotFoundError()
elif error_code == ERR_INTERNAL:
raise InternalError(sub_resp['msg'], sub_resp['data'])
else:
raise CustomError(error_code, sub_resp['msg'], sub_resp['data'])
self.backend.del_request_dir(req_id)
# validate and return the response
result = sub_resp['result']
return result
def call(self, task_req):
"""Make RPC call for given task"""
# Massage the data
try:
req = task_req['request']
if type(req) is list:
if len(req) < 1:
return exc_to_json_error(InvalidReqError(data=dict(msg="Empty Batch")))
# find batch interface
iface_name = None
first_method = req[0].get('method', None)
if first_method:
iface_name = 'Default' if first_method.find('.') < 0 else first_method.split('.')[0]
if iface_name:
self._cmd_call('{}.{}'.format(iface_name, SHCmds.preBatch.name))
task_resp = [self._req_call(r) for r in req]
if iface_name:
self._cmd_call('{}.{}'.format(iface_name, SHCmds.postBatch.name))
else:
task_resp = self._req_call(req)
except Exception as e:
task_resp = exc_to_json_error(InternalError(repr(e)))
return task_resp<|fim▁end|> | |
<|file_name|>__init__.py<|end_file_name|><|fim▁begin|>"""
Settings and configuration for Django.
Values will be read from the module specified by the DJANGO_SETTINGS_MODULE environment
variable, and then from django.conf.global_settings; see the global settings file for
a list of all possible variables.
"""
import importlib
import os
import time
from aiohttp.log import web_logger
from . import global_settings
ENVIRONMENT_VARIABLE = "AIOWEB_SETTINGS_MODULE"<|fim▁hole|>
class ImproperlyConfigured(BaseException):
def __init__(self, reason):
self.reason = reason
def __str__(self):
return self.reason
class BaseSettings(object):
"""
Common logic for settings whether set by a module or by the user.
"""
def __setattr__(self, name, value):
if name in ("MEDIA_URL", "STATIC_URL") and value and not value.endswith('/'):
raise ImproperlyConfigured("If set, %s must end with a slash" % name)
object.__setattr__(self, name, value)
class Settings(BaseSettings):
def __init__(self, settings_module):
# update this dict from global settings (but only for ALL_CAPS settings)
for setting in dir(global_settings):
if setting.isupper():
setattr(self, setting, getattr(global_settings, setting))
# store the settings module in case someone later cares
self.SETTINGS_MODULE = settings_module
if self.SETTINGS_MODULE:
try:
mod = importlib.import_module(self.SETTINGS_MODULE)
tuple_settings = (
"APPS",
)
self._explicit_settings = set()
for setting in dir(mod):
if setting.isupper():
setting_value = getattr(mod, setting)
if (setting in tuple_settings and
not isinstance(setting_value, (list, tuple))):
raise ImproperlyConfigured("The %s setting must be a list or a tuple. " % setting)
setattr(self, setting, setting_value)
self._explicit_settings.add(setting)
except ImportError:
web_logger.warn("Failed to import settings module")
else:
web_logger.warn("No settings module specified")
def is_overridden(self, setting):
return setting in self._explicit_settings
def __repr__(self):
return '<%(cls)s "%(settings_module)s">' % {
'cls': self.__class__.__name__,
'settings_module': self.SETTINGS_MODULE,
}
class UserSettingsHolder(BaseSettings):
"""
Holder for user configured settings.
"""
# SETTINGS_MODULE doesn't make much sense in the manually configured
# (standalone) case.
SETTINGS_MODULE = None
def __init__(self, default_settings):
"""
Requests for configuration variables not in this class are satisfied
from the module specified in default_settings (if possible).
"""
self.__dict__['_deleted'] = set()
self.default_settings = default_settings
def __getattr__(self, name):
if name in self._deleted:
raise AttributeError
return getattr(self.default_settings, name)
def __setattr__(self, name, value):
self._deleted.discard(name)
super(UserSettingsHolder, self).__setattr__(name, value)
def __delattr__(self, name):
self._deleted.add(name)
if hasattr(self, name):
super(UserSettingsHolder, self).__delattr__(name)
def __dir__(self):
return sorted(
s for s in list(self.__dict__) + dir(self.default_settings)
if s not in self._deleted
)
def is_overridden(self, setting):
deleted = (setting in self._deleted)
set_locally = (setting in self.__dict__)
set_on_default = getattr(self.default_settings, 'is_overridden', lambda s: False)(setting)
return (deleted or set_locally or set_on_default)
def __repr__(self):
return '<%(cls)s>' % {
'cls': self.__class__.__name__,
}
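# Usage sketch (illustrative only; 'DEBUG' is an arbitrary setting name):
#   holder = UserSettingsHolder(settings)
#   holder.DEBUG = True            # shadows the module-backed value
#   holder.is_overridden('DEBUG')  # -> True
#   del holder.DEBUG               # accessing it now raises AttributeError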
settings = Settings(os.environ.get(ENVIRONMENT_VARIABLE))<|fim▁end|> | |
<|file_name|>conditions.py<|end_file_name|><|fim▁begin|># Past examples are programmatically insecure
# You require arguments to be passed in but what if the wrong arguments are provided?
# Look at the timestable solution which changes numbers to text - what happens if you provide the number 30?
#
# One way of controlling these things uses conditions
# These enable specific operations to be carried out "if" something is the case or "else" something else is the case
a = 5
# first condition trial
if a >= 5:
    print("Value is greater than or equal to 5")
else:
    print("Value is less than 5")
# second condition trial
if a > 5:
print("Value is greater than 5")
elif a < 5:
print("Value is less than 5")
else:
print("Value is 5")
# if and (2 conditions)
a=3
b=5
if (a==3) and (b==5):
print("a and b are as expected - great :)")
else:<|fim▁hole|> print("a and b not as expected - not great :(")<|fim▁end|> | |
<|file_name|>lib.rs<|end_file_name|><|fim▁begin|>/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
//! This module contains shared types and messages for use by devtools/script.
//! The traits are here instead of in script so that the devtools crate can be
//! modified independently of the rest of Servo.
#![crate_name = "style_traits"]
#![crate_type = "rlib"]
#![feature(custom_derive)]<|fim▁hole|>
#[macro_use]
extern crate cssparser;
extern crate euclid;
extern crate rustc_serialize;
extern crate serde;
extern crate util;
#[macro_use]
pub mod values;
pub mod viewport;
use cssparser::{Parser, SourcePosition};
pub trait ParseErrorReporter {
fn report_error(&self, input: &mut Parser, position: SourcePosition, message: &str);
fn clone(&self) -> Box<ParseErrorReporter + Send + Sync>;
}<|fim▁end|> | #![feature(plugin)]
#![plugin(serde_macros)]
#![plugin(plugins)]
#![deny(unsafe_code)] |
<|file_name|>CognateParser.py<|end_file_name|><|fim▁begin|>import re
from nexusmaker.tools import natsort
is_combined_cognate = re.compile(r"""(\d+)([a-z]+)""")
class CognateParser(object):
UNIQUE_IDENTIFIER = "u_"
def __init__(self, strict=True, uniques=True, sort=True):
"""
Parses cognates.
- strict (default=True): remove dubious cognates (?)
- uniques (default=True): non-cognate items get unique states
- sort (default=True): normalise ordering with natsort (i.e. 2,1 => 1,2)
"""
self.uniques = uniques
self.strict = strict
self.sort = sort
self.unique_id = 0
def is_unique_cognateset(self, cog, labelled=False):
if not labelled:
return str(cog).startswith(self.UNIQUE_IDENTIFIER)
else:
return "_%s" % self.UNIQUE_IDENTIFIER in str(cog)
def _split_combined_cognate(self, cognate):
m = is_combined_cognate.findall(cognate)
return [m[0][0], cognate] if m else [cognate]
def get_next_unique(self):
if not self.uniques:
return []
self.unique_id = self.unique_id + 1
return ["%s%d" % (self.UNIQUE_IDENTIFIER, self.unique_id)]
def parse_cognate(self, value):
raw = value
if value is None:
return self.get_next_unique()
elif value == '':
return self.get_next_unique()
elif str(value).lower() == 's': # error
return self.get_next_unique()
elif 'x' in str(value).lower(): # error
return self.get_next_unique()
elif isinstance(value, str):
if value.startswith(","):
raise ValueError("Possible broken combined cognate %r" % raw)
if value.endswith("-"):
raise ValueError("Possible broken combined cognate %r" % raw)
elif ';' in value:
raise ValueError("Possible broken combined cognate %r" % raw)
value = value.replace('.', ',').replace("/", ",")
# parse out subcognates
value = [
self._split_combined_cognate(v.strip()) for v in value.split(",")
]
value = [item for sublist in value for item in sublist]
if self.strict:
# remove dubious cognates
value = [v for v in value if '?' not in v]
# exit if all are dubious, setting to unique state
if len(value) == 0:
return self.get_next_unique()
else:
value = [v.replace("?", "") for v in value]
# remove any empty things in the list
value = [v for v in value if len(v) > 0]
if self.sort:
value = natsort(value)<|fim▁hole|> else:
raise ValueError("%s" % type(value))<|fim▁end|> | return value |
<|file_name|>2095_tokyo.js<|end_file_name|><|fim▁begin|>function init() {
em.setProperty("state", "0");
em.setProperty("leader", "true");
}
function setup(eim, leaderid) {
em.setProperty("state", "1");
em.setProperty("leader", "true");
var eim = em.newInstance("2095_tokyo" + leaderid);
var map = eim.createInstanceMap(802000311);
map.killAllMonsters(false);
eim.startEventTimer(1200000); // 20 min
return eim;
}
function playerEntry(eim, player) {
var map = eim.getMapInstance(0);
player.changeMap(map, map.getPortal(0));
}
function playerRevive(eim, player) {
return false;<|fim▁hole|>}
function scheduledTimeout(eim) {
if (eim.disposeIfPlayerBelow(100, 802000312)) {
em.setProperty("state", "0");
em.setProperty("leader", "true");
}
}
function changedMap(eim, player, mapid) {
    if (mapid == 802000312) { // the excluded IDs 802000311/802000313 can never equal 802000312
eim.unregisterPlayer(player);
if (eim.disposeIfPlayerBelow(0, 0)) {
em.setProperty("state", "0");
em.setProperty("leader", "true");
}
}
}
function playerDisconnected(eim, player) {
return 0;
}
function monsterValue(eim, mobId) {
return 1;
}
function playerExit(eim, player) {
eim.unregisterPlayer(player);
if (eim.disposeIfPlayerBelow(0, 0)) {
em.setProperty("state", "0");
em.setProperty("leader", "true");
}
}
function end(eim) {
eim.disposeIfPlayerBelow(100, 0);
em.setProperty("state", "0");
em.setProperty("leader", "true");
}
function clearPQ(eim) {
end(eim);
}
function allMonstersDead(eim) {}
function leftParty (eim, player) {}
function disbandParty (eim) {}
function playerDead(eim, player) {}
function cancelSchedule() {}<|fim▁end|> | |
<|file_name|>__init__.py<|end_file_name|><|fim▁begin|>from .array_ import array # noqa: F401
from .base import ( # noqa: F401
ExtensionArray,
ExtensionOpsMixin,
ExtensionScalarOpsMixin,
)
from .categorical import Categorical # noqa: F401
from .datetimes import DatetimeArray # noqa: F401
from .integer import IntegerArray, integer_array # noqa: F401
from .interval import IntervalArray # noqa: F401<|fim▁hole|>from .numpy_ import PandasArray, PandasDtype # noqa: F401
from .period import PeriodArray, period_array # noqa: F401
from .sparse import SparseArray # noqa: F401
from .timedeltas import TimedeltaArray # noqa: F401<|fim▁end|> | |
<|file_name|>database.py<|end_file_name|><|fim▁begin|>""" Database-related functionality for Minos. """<|fim▁hole|>
db = SQLAlchemy()
class SonosConfig(db.Model):
""" Database-class that contains the configuration for Sonos funcionality. """
__tablename__ = 'sonos_config'
key = db.Column(db.String, nullable=False, primary_key=True)
value = db.Column(db.String)
_type = db.Column('type', db.String)
class OAuthConfig(db.Model):
""" Configuration of OAuth providers. """
__tablename__ = 'oauth_settings'
id = db.Column(db.Integer, primary_key=True)
provider_name = db.Column(db.String, nullable=False, index=True)
key = db.Column(db.String, nullable=False)
value = db.Column(db.String, nullable=False)
__table_args__ = (db.UniqueConstraint('provider_name', 'key', name='oauth_settings_provider_key_uq'),)
# Track user roles in a table.
user_roles = db.Table(
'user_roles',
db.Column(
'user_id',
db.Integer,
db.ForeignKey('users.id', ondelete='CASCADE'),
primary_key=True
),
db.Column(
'role_id',
db.Integer,
db.ForeignKey('roles.id', ondelete='CASCADE'),
primary_key=True
)
)
class Role(db.Model):
""" A role represents a type of user in the system.
Roles do no support inheritence and are simply flat permission classes
instead of a hierarchy.
"""
__tablename__ = 'roles'
# Columns.
id = db.Column(db.Integer, primary_key=True)
name = db.Column(db.String, unique=True, nullable=False)
# Relationships and constraints.
users = db.relationship(
'User',
secondary=user_roles,
back_populates='roles'
)
class User(db.Model):
__tablename__ = 'users'
# Columns.
id = db.Column(db.Integer, primary_key=True)
name = db.Column(db.String(length=50), unique=True, nullable=False)
provider = db.Column(db.String, nullable=False)
provider_token = db.Column(db.String, nullable=False)
provider_token_secret = db.Column(db.String, nullable=False)
# Relationships and constraints.
roles = db.relationship(
'Role',
secondary=user_roles,
back_populates='users'
)
@app.cache.memoize(timeout=300)
def has_role(self, role_name):
""" Check if a user has a role. """
try:
from flask import session
# If the user is not logged in, bail out right away.
if not session.get('logged_in', False):
return False
        except Exception:
            pass
# If any of the role names match ours then we have that role.
return any(map(lambda r: r.name == role_name, self.roles))
class UserVote(db.Model):
__tablename__ = 'votes'
__table_args__ = (db.PrimaryKeyConstraint('uid', 'uri', name='uservotes_pk'),)
uid = db.Column(db.ForeignKey('users.id'))
uri = db.Column(db.String(), nullable=False, index=True)
speaker = db.Column(db.String, nullable=False)
direction = db.Column(db.Integer, nullable=False)
class Sessions(db.Model):
""" Session object for Flask-Session. """
__tablename__ = 'sessions'
id = db.Column(db.Integer, primary_key=True)
session_id = db.Column(db.String(256), unique=True)
data = db.Column(db.LargeBinary)
expiry = db.Column(db.DateTime)<|fim▁end|> | from flask_sqlalchemy import SQLAlchemy
from .app import app, cache |
<|file_name|>cli.js<|end_file_name|><|fim▁begin|>var pkg = require([__dirname, "..", "package"].join("/"));<|fim▁hole|>module.exports = {
initialize: function(options){
var options = _.merge(options, {
version: {
describe: "Print the Cuisine version"
}
});
yargs.help("help");
yargs.version(pkg.version, "version");
yargs.options(options).argv;
},
run: function(command){
if(_.has(commands, command)){
this.initialize(commands[command].options);
commands[command].execute(yargs.argv);
}
else{
this.initialize({});
console.log(["Subcommand ", command, " not found!"].join("'"))
}
}
}<|fim▁end|> | var _ = require("lodash");
var yargs = require("yargs");
var commands = require([__dirname, "commands"].join("/"));
|
<|file_name|>version.py<|end_file_name|><|fim▁begin|><|fim▁hole|><|fim▁end|> | # Do not edit this file, pipeline versioning is governed by git tags
__version__ = "0.0.0"
<|file_name|>UCNES_Class.py<|end_file_name|><|fim▁begin|>import numpy as np
import os
import sys
import math
#import class files
# sys.path.append('../../../')
from source import bioRead as br
from source import classify as cl
#import PyInsect for measuring similarity
#sys.path.append('../../../../')
from PyINSECT import representations as REP
from PyINSECT import comparators as CMP
from multiprocessing import Pool
import multiprocessing
# Local function
def __getSimilaritiesForIndex(setting):
i, l, S, ngg = setting # Explode
for j in range(i,l):
dTmp = sop.getSimilarityDouble(ngg[i],ngg[j])
if (math.isnan(dTmp)):
raise Exception("Invalid similarity! Check similarity implementation.")
S[i,j] = dTmp
# End local function
# If we have cached the main analysis data
if os.path.exists('SimilaritiesAndDictionaries/UCNE.npz'):
# Use them
npz = np.load('SimilaritiesAndDictionaries/UCNE.npz')
hd = npz['hd']
cd = npz['cd']
S = npz['S']
l1 = npz['l1']
l2 = npz['l2']
l = npz['l']
L = np.append(np.zeros(l1),np.ones(l2),axis=0)
print "WARNING: Using cached data!"
else:
# else start reading
sr = br.SequenceReader()
# Get Human UCNE fasta data
sr.read('./biodata/UCNEs/hg19_UCNEs.fasta')
# sr.read('./biodata/UCNEs/hg19_UCNEs-10.fasta')
hd = sr.getDictionary()
print "Gained Human Dictionary"
# Get Chicken UCNE fasta data
sr.read('./biodata/UCNEs/galGal3_UCNEs.fasta')
# sr.read('./biodata/UCNEs/galGal3_UCNEs-10.fasta')
cd = sr.getDictionary()
print "Gained Chicken Dictionary"
# Set n-gram graph analysis parameters
n=3
Dwin=2
subjectMap = {}
ngg = {}
# Get number of UNCEs (for either type of UNCE)
l1 = len(hd.keys())
l2 = len(cd.keys())
l = l1 + l2
print "Found %d human UNCEs"%(l1)
print "Found %d chicken UNCEs"%(l2)
# For every human UNCE
i = 0
for key,a in hd.iteritems():
# Assign appropriate label
subjectMap[i] = (key,'humans')
# Create corresponding graph
ngg[i] = REP.DocumentNGramGraph(n,Dwin,a)
i += 1
print "Graphs Created for Humans"
for key,b in cd.iteritems():
subjectMap[i] = (key,'chickens')
ngg[i] = REP.DocumentNGramGraph(n,Dwin,b)
i += 1
print "Graphs Created for Chickens"
S = np.empty([l, l])
L = np.empty([l])
sop = CMP.SimilarityNVS()
print "Getting human similarities..."
# TODO: Examine default (problems with locking S)
# pThreadPool = Pool(1);
qToExecute = list() # Reset tasks
for i in range(0,l1):
print i," ",
L[i] = 0 #0 for humans
qToExecute.append((i,l,S,ngg))
# pThreadPool.map(__getSimilaritiesForIndex, qToExecute)
map(__getSimilaritiesForIndex,qToExecute)<|fim▁hole|> qToExecute = list() # Reset tasks
print "Getting chicken similarities..."
for i in range(l1,l):
print i," ",
L[i] = 1 #0 for chickens
qToExecute.append((i,l,S,ngg))
# pThreadPool.map(__getSimilaritiesForIndex, qToExecute)
map(__getSimilaritiesForIndex, qToExecute)
# for i in range(l1,l):
# print i," ",
# L[i] = 1 #1 for chickens
# for j in range(i,l):
# S[i,j] = sop.getSimilarityDouble(ngg[i],ngg[j])
print ""
print "Getting chicken similarities... Done"
# Update symmetric matrix, based on current findings
for i in range(0,l):
for j in range(0,i):
S[i,j] = S[j,i]
print "Similarity matrix constructed.."
if not os.path.exists('SimilaritiesAndDictionaries'):
os.mkdir('SimilaritiesAndDictionaries')
np.savez('SimilaritiesAndDictionaries/UCNE.npz', hd=hd, cd=cd, l1=l1, l2=l2,
l=l, S=S)
reps = 10
L1 = L[0:l1]
L2 = L[l1:]
metrics = dict()
cm = dict()
class_types = {0:"No kernelization",1:"Spectrum Clip",2:"Spectrum Flip",3:"Spectrum Shift",4:"Spectrum Square"}
print "Testing for different kernelization methods..\n\n"
for i in range(0, len(class_types)):
try:
print class_types[i],"\n"
evaluator = cl.Evaluator(cl.SVM())
Sp = cl.kernelization(S,i)
S1 = Sp[0:l1,:]
S2 = Sp[l1:,:]
metrics[class_types[i]],cm[class_types[i]] = evaluator.Randomized_kfold((S1,S2),(L1,L2),reps,verbose=True)
print ""
except Exception as e:
print "Approach %s failed for reason:\n%s"%(class_types[i], str(e))
np.savez('SimilaritiesAndDictionaries/metrics.npz', metrics=metrics, cm=cm)<|fim▁end|> |
print ""
print "Getting human similarities... Done."
|
<|file_name|>ViewTradeActivityTest.java<|end_file_name|><|fim▁begin|>package com.gracehoppers.jlovas.bookwrm;
import android.app.Activity;
import android.content.Intent;
import android.test.ActivityInstrumentationTestCase2;
import android.widget.Button;
import android.widget.TextView;
import junit.framework.TestCase;
import java.util.ArrayList;
/**
* Created by ljuarezr on 11/24/15.
*/
public class ViewTradeActivityTest extends TestCase {
/*
Testing ViewTradeActivity. (No UI test yet).
Create a mock trade manually, then test that it is displayed correctly
*/
private TextView borrowerUsername, borrowerBook, ownerUsername, ownerBook, comments;
private Button complete;
public void testViewTrade(){
//First create two accounts
Account A = new Account();
Account B = new Account();
try{
A.setUsername("A");
A.setEmail("[email protected]");
A.setCity("YEG");
B.setUsername("B");
B.setEmail("[email protected]");
B.setCity("YEG");
} catch (NoSpacesException e){
} catch (TooLongException e){
} catch (IllegalEmailException e ) {
}
//Create two books to make the trade with
Book bookA = new Book();
bookA.setTitle("BookA");
bookA.setAuthor("AuthorA");
Book bookB = new Book();
bookB.setTitle("BookB");
bookB.setAuthor("AuthorB");
ArrayList<Book> borrowerBookList = new ArrayList<>();
borrowerBookList.add(bookB);
//Set up the trade
Trade trade = new Trade();
trade.setOwner(A);
trade.setBorrower(B);
trade.setBorrowerBook(borrowerBookList);
trade.setOwnerBook(bookA);
trade.setOwnerComment("Test Trade");
<|fim▁hole|> //confirm that book was added to the TradeHistory
assertTrue(A.getTradeHistory().getSize() == 1);
}
public void testComplete(){
}
}<|fim▁end|> |
//Reset the application to a known state
A.getTradeHistory().clear();
A.getTradeHistory().addTrade(trade); |
<|file_name|>test_init_code.py<|end_file_name|><|fim▁begin|>#!/usr/bin/python
#
# Copyright 2018-2021 Polyaxon, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import os
import pytest
from mock import patch
from polyaxon.env_vars.keys import (
POLYAXON_KEYS_GIT_CREDENTIALS,
POLYAXON_KEYS_RUN_INSTANCE,
)
from polyaxon.exceptions import PolyaxonContainerException
from polyaxon.init.git import (
create_code_repo,
get_clone_url,
has_cred_access,
has_ssh_access,
)
from polyaxon.utils.test_utils import BaseTestCase
@pytest.mark.init_mark<|fim▁hole|> create_code_repo(repo_path="", url="", revision="")
def test_raise_if_env_var_not_correct(self):
os.environ[POLYAXON_KEYS_RUN_INSTANCE] = "foo"
with self.assertRaises(PolyaxonContainerException):
create_code_repo(repo_path="", url="", revision="")
del os.environ[POLYAXON_KEYS_RUN_INSTANCE]
def test_has_cred_access(self):
assert has_cred_access() is False
os.environ[POLYAXON_KEYS_GIT_CREDENTIALS] = "foo:bar"
assert has_cred_access() is True
del os.environ[POLYAXON_KEYS_GIT_CREDENTIALS]
def test_has_ssh_access(self):
assert has_ssh_access() is False
def test_get_clone_url(self):
url = "https://foo.com/test"
assert get_clone_url(url=url) == url
os.environ[POLYAXON_KEYS_GIT_CREDENTIALS] = "foo:bar"
assert get_clone_url(url=url) == "https://foo:[email protected]/test"
del os.environ[POLYAXON_KEYS_GIT_CREDENTIALS]
with patch("polyaxon.init.git.has_ssh_access") as ssh_access_mock:
ssh_access_mock.return_value = True
assert get_clone_url(url=url) == "[email protected]:test.git"
url = "[email protected]:test.git"
with patch("polyaxon.init.git.has_ssh_access") as ssh_access_mock:
ssh_access_mock.return_value = True
assert get_clone_url(url=url) == "[email protected]:test.git"<|fim▁end|> | class TestInitCode(BaseTestCase):
def test_raise_if_env_var_not_found(self):
with self.assertRaises(PolyaxonContainerException): |
<|file_name|>__init__.py<|end_file_name|><|fim▁begin|>from __future__ import unicode_literals
import logging
import os
from mopidy import config, ext
__version__ = '0.3.0'
logger = logging.getLogger(__name__)
class Extension(ext.Extension):
dist_name = 'Mopidy-Webhooks'
ext_name = 'webhooks'
version = __version__<|fim▁hole|>
def get_default_config(self):
conf_file = os.path.join(os.path.dirname(__file__), 'ext.conf')
return config.read(conf_file)
def get_config_schema(self):
schema = super(Extension, self).get_config_schema()
schema['api_key'] = config.String()
schema['api_key_header_name'] = config.String()
schema['status_update_interval'] = config.Integer()
schema['webhook_url'] = config.String()
return schema
def setup(self, registry):
from .frontend import WebhookFrontend
registry.add('frontend', WebhookFrontend)<|fim▁end|> | |
<|file_name|>index.d.ts<|end_file_name|><|fim▁begin|>// Type definitions for ngWYSIWYG 0.6
// Project: https://github.com/psergus/ngWYSIWYG
// Definitions by: Patrick Mac Kay <https://github.com/patrick-mackay>
// Definitions: https://github.com/DefinitelyTyped/DefinitelyTyped
declare namespace ngWYSIWYG {
interface Toolbar {
name: string;
items: string[];
}<|fim▁hole|> sanitize: boolean;
toolbar?: Toolbar[];
}
}<|fim▁end|> |
interface Config { |
<|file_name|>plants.js<|end_file_name|><|fim▁begin|>PLANT_CONFIG = [
{key: 'name', label: 'Name'},
{key: 'scienceName', label: 'Scientific name'}
];
Template.plants.helpers({
plantListConfig: function() {
return PLANT_CONFIG;
}
});
Template.newPlant.helpers({
plantListConfig: function() {
return PLANT_CONFIG;
}
});
Template.newPlant.events({
'submit .newPlantForm': function(event) {
event.preventDefault();
var data = {name:'',scienceName:''};
PLANT_CONFIG.forEach(function(entry){
var $input = $(event.target).find("[name='" + entry.key + "']");
if($input.val()) {
data[entry.key] = $input.val();
}
});
Meteor.call('createPlant', data);
PLANT_CONFIG.forEach(function(entry){
$(event.target).find("[name='" + entry.key + "']").val('');
});
}
});<|fim▁hole|>Template.plantListItem.events({
'click .plant-delete': function(){
Meteor.call('deletePlant', this._id);
}
});<|fim▁end|> | |
<|file_name|>jsonZipper.js<|end_file_name|><|fim▁begin|>/*
Author: Gerard Lamusse
Created: 08/2015
Version: 1.0
URL: https://github.com/u12206050/jsonZipper
*/
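/*
  Usage sketch (illustrative only; `records` is a hypothetical array of
  objects sharing an "id" field):
    var jz = new jsonZipper(records, {identifiers: ["id"]});
    var packed = jz.zip();                     // {M: map, D: data, O: options}
    var restored = new jsonZipper(packed, true).unzip();
*/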
var jsonZipper = (function(){
var jz = function(_jsonObj, _options) {
var Z = this;
var MAP = [];
var opts = _options && typeof(_options) !== "boolean" ? _options : {};
/* Public Functions */
Z.zip = function() {
if (Z.status === "zipable") {
Z.uzOpts = {I:[],A:Z.isArray,eC:[],iC:[]};
if (Z.isArray) {
var x = 0;
var y = Z.JO.length;
while (x < y) {
compress(Z.JO[x++]);
}
} else {
compress(Z.JO);
}
Z.status = "zipped";
return {M:MAP,D:Z.JO,O:Z.uzOpts};
} return false;
};
Z.unzip = function() {
if (Z.status === "unzipable") {
if (Z.isArray) {
var x = 0;
var y = Z.JO.length;
while (x < y) {
extract(Z.JO[x++]);
}
} else {
extract(Z.JO);
}
Z.status = "unzipped";
return Z.JO;
} return false;
};
Z.compress = function(obj) {
if (Z.status === "compressing") {
Z.JO.push(obj);
compress(obj);
} else if (Z.status === "ready to load object") {
Z.isArray = true;
Z.uzOpts = {I:[],A:Z.isArray,eC:[],iC:[]};
Z.status = "compressing";
Z.JO = [];
Z.JO.push(obj);
compress(obj);
} else return false;
return {M:MAP,D:Z.JO,O:Z.uzOpts};
};
var prevExtractIndex = false;
var extracted = [];
Z.extract = function(i) {
if (Z.status === "unzipable" || Z.status === "zipped") {
if (extracted.indexOf(i) > -1) {
prev = Z.JO[i];
} else {
if (!prevExtractIndex || prevExtractIndex+1 !== i) {
setPrev(i);
}
extract(Z.JO[i]);
extracted.push(i);
}
prevExtractIndex = i;
}
return Z.JO[i];
};
Z.length = function() {
return JSON.stringify(Z.JO).length + (MAP ? JSON.stringify(MAP).length : 0);
};
Z.options = function(opts,isArray) {
/* []: An array of key names that will be used as identifiers.
WARGING: Should be within every object, but repeating, NO Booleans or Integers allowed.
Hint: Most common values that can be guessed/used from previous objects */
Z.identifiers = opts.identifiers || [];
/* boolean: If _jsonObj is an array or not */
Z.isArray = opts.isArray || isArray;
/* []: An array of key names not to map or zip */
Z.exclude = opts.exclude || [];
/* []: An array of key names which values to include in mapping will need identifiers */
Z.include = opts.include || [];
/* []: An array of key names to be removed from the object */
Z.remove = opts.remove || false;
/* {}: An object containing key(s) to add, with function(s) which return the value */
Z.add = opts.add || false;
<|fim▁hole|> }
Z.load = function(_jsonObj, isJZobj) {
Z.startLength = 0;
MAP = [];
try {
var stringIT = JSON.stringify(_jsonObj);
Z.startLength = stringIT.length;
Z.JO = JSON.parse(stringIT);
}
catch (err) {
throw "The json object has recursive references or is too big to load into memory";
}
Z.status = "zipable";
if (isJZobj) {
if (Z.JO.D && Z.JO.O && Z.JO.M) {
MAP = Z.JO.M;
Z.identifiers = Z.JO.O.I || [];
Z.isArray = Z.JO.O.A;
Z.exclude = Z.JO.O.eC || false;
Z.include = Z.JO.O.iC || false;
Z.JO = Z.JO.D;
Z.remove = false;
Z.add = false;
Z.status = "unzipable";
} else
Z.options(isJZobj,_jsonObj.constructor === Array);
}
prev = false;
prevID = false;
};
/* Private Functions */
var getID = function(key, value) {
var mI = MAP.indexOf(key);
if (mI < 0) {
if (value) {
return MAP.push(key) - 1;
}
if (Z.exclude.indexOf(key) > -1) {
Z.uzOpts.eC.push(key);
return key;
} else {
mI = MAP.push(key) - 1;
if (Z.identifiers.indexOf(key) > -1) {
Z.uzOpts.I.push(mI);
}
if (Z.include.indexOf(key) > -1) {
Z.uzOpts.iC.push(mI);
}
}
}
return mI;
};
/* Compress the given object, taking note of the previous object */
var prev = false;
var prevID = false;
var compress = function(J) {
add(J);
var keys = Object.keys(J);
var prevSame = prev ? true : false;
var id = '';
var i=0;
for (xend=Z.identifiers.length; i<xend; i++) {
var ikey = Z.identifiers[i];
J[ikey] = getID(J[ikey],1);
id += J[ikey];
}
if (!prevSame || !prevID || prevID !== id) {
prevSame = false;
prev = J;
prevID = id;
}
i=0;
for (iend=keys.length; i<iend; i++) {
var key = keys[i];
if (Z.remove && Z.remove.indexOf(key) > -1)
delete J[key];
else {
var mI = getID(key);
if (prevSame && (MAP[prev[mI]] === J[key] || prev[mI] === J[key]))
delete J[key];
else if (Z.include.indexOf(key) > -1) {
if (Z.identifiers.indexOf(key) > -1)
J[mI] = J[key];
else J[mI] = getID(J[key],1);
delete J[key];
} else if (mI !== key) {
J[mI] = J[key];
delete J[key];
}
}
}
};
/* Extract the given object, taking note of the previous object */
var extract = function(J) {
if (J === prev)
return;
add(J);
var prevSame = Z.isArray ? isSame(prev, J) : false;
var keys = Object.keys(J);
if (prevSame)
extend(prev,J);
else if (Z.identifiers) {
var x=0;
for (xend=Z.identifiers.length; x<xend; x++) {
var ikey = Z.identifiers[x];
J[ikey] = MAP[J[ikey]];
}
}
var i=0;
for (iend=keys.length; i<iend; i++) {
var key = keys[i]*1;
var value = J[key];
if (Z.remove && Z.remove.indexOf(key) > -1)
delete J[key];
else {
if (Z.exclude.indexOf(key) > -1) {
J[key] = J[key];
if (Z.include.indexOf(key) > -1)
J[key] = MAP[J[key]];
} else {
if (Z.include.indexOf(key) > -1)
J[MAP[key]] = MAP[J[key]];
else
J[MAP[key]] = J[key];
delete J[key];
}
}
}
prev = J;
};
/* Add the additional keys and values to the given object */
var add = function(J) {
if (Z.add) {
for (var key in Z.add) {
if('undefined' !== typeof Z.add[key]){
if (typeof(Z.add[key]) === "function")
J[key] = Z.add[key](J);
else
J[key] = Z.add[key];
}
}
}
};
/* Set the previous full object from the current index, incl. */
var setPrev = function(i) {
if (i > 0) {
var x=0;
for (xend=Z.identifiers.length; x<xend; x++) {
if ('undefined' === typeof Z.JO[i][Z.identifiers[x]]) {
setPrev(i-1);
return;
}
}
extract(Z.JO[i]);
} else
extract(Z.JO[0]);
};
/* Checks if identiifiers match */
var isSame = function(obj1, obj2) {
if (Z.identifiers && obj1 && obj2 && obj1 !== obj2) {
var x=0;
for (xend=Z.identifiers.length; x<xend; x++) {
var key = Z.identifiers[x];
var mKey = MAP[Z.identifiers[x]];
if ('undefined' === typeof obj1[mKey] || ('undefined' !== typeof obj2[key] && MAP[obj2[key]] !== obj1[mKey]))
return false;
}
} else return false;
return true;
};
/* Merges an object by reference into the first one, replacing values from the second object into the first if duplicate keys exist */
var merge = function(obj1,obj2) {
for (var key in obj2) {
if('undefined' !== typeof obj2[key]) {
obj1[key] = obj2[key];
}
}
};
/* Adds all keys and values from the base to obj2 for each key that does not exist in obj2 */
var extend = function(base,obj2) {
for (var key in base) {
if('undefined' === typeof obj2[key]) {
obj2[key] = base[key];
}
}
};
Z.setID = opts.setID || false;
    Z.options(opts, _jsonObj.constructor === Array);
Z.status = "ready to load object";
/* Check if object is given and if options is object or 'compressed' flag */
if (_jsonObj && typeof(_jsonObj) === "object") {
/* When unzipping an object ensure _options is true and not an object, once loaded, you can set the options */
if (_options && typeof(_options) === "boolean") {
Z.load(_jsonObj,true);
} else {
Z.load(_jsonObj,false);
}
}
};
return jz;
})();<|fim▁end|> | |
<|file_name|>description.js<|end_file_name|><|fim▁begin|>module.exports = {
getMeta: function(meta) {<|fim▁hole|> d = d[0];
}
return {
description: d
}
}
};<|fim▁end|> |
var d = meta.metaDescription || meta.description || meta.Description;
if (d && d instanceof Array) { |
<|file_name|>train_util.py<|end_file_name|><|fim▁begin|># coding=utf-8
# Copyright 2022 The Google Research Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# Lint as: python3
"""Training helper functions that are shared across tasks."""
import contextlib
import functools
import operator
import signal
import typing
from typing import Any, Callable, Dict, Iterable, Optional, Sequence, Tuple, Union
from absl import logging
import dataclasses
import flax
import gin
import jax
import jax.numpy as jnp
import numpy as np
import optax
from gfsa import jax_util
from gfsa.datasets import data_loading
@jax_util.register_dataclass_pytree
@dataclasses.dataclass
class ExampleWithMetadata:
"""Stores an example or batch of examples.
Attributes:
epoch: Integer representing the epoch that this example comes from.
example_id: Integer ID uniquely identifying this example in the dataset.
example: The example itself.
mask: Array that is True for actual examples, False for padding.
static_metadata: Metadata about this example or batch that should result in
a new `jit` XLA computation (i.e. padded shapes).
"""
epoch: Any
example_id: Any
example: Any
mask: jax_util.NDArray = np.array(True)
static_metadata: Any = None
@jax_util.register_dataclass_pytree
@dataclasses.dataclass
class RatioMetric:
"""A ratio, where numerator and denominator should be summed separately.
Attributes:
numerator: Numerator of the metric.
denominator: Denominator of the metric.
"""
numerator: jax_util.NDArray
denominator: jax_util.NDArray
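# Example (illustrative): a cross-device accuracy metric is best expressed as
# RatioMetric(numerator=num_correct, denominator=num_examples); summing each
# part before dividing avoids averaging per-device ratios with unequal counts.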
MetricValue = Union[float, jax_util.NDArray, RatioMetric]
# A loss function is a callable (model, example, static_metadata)
# -> (loss, metrics)
# pyformat: disable
LossFunWithMetrics = Callable[
[Any, Any, Any],
Tuple[jax_util.NDArray, Dict[str, MetricValue]]]
# pyformat: enable
# A validation function is a callable (replicated_model) -> (objective, metrics)
# where model is a tree of ShardedDeviceArrays, and objective is the value we
# want to make decrease.
ValidationFunction = Callable[[Any], Tuple[float, Dict[str, MetricValue]]]
def device_broadcast(x, num_devices):
"""Broadcast a value to all devices."""
return jax.pmap(lambda _: x)(jnp.arange(num_devices))
def _parallel_train_step(
optimizer,
batched_examples,
static_batch_metadata,
loss_fn,
max_global_norm = None,
**optimizer_hyper_params,
):
"""Train the model for one step in parallel across devices.
Args:
optimizer: Optimizer that tracks the model and parameter state. Should be
replicated to each device, i.e. should contain ShardedDeviceArrays with a
leading axis (num_devices, ...) but with the same content on each device.
batched_examples: A structure of NDArrays representing a batch of examples.
Should have two leading batch dimensions: (num_devices,
batch_size_per_device, ...)
static_batch_metadata: Metadata about this batch, which will be shared
across all batched examples. Each value of this results in a separate
XLA-compiled module.
loss_fn: Task-specific non-batched loss function to apply. Should take the
current model (optimizer.target) and an example from batched_examples, and
return a tuple of the current loss (as a scalar) and a dictionary from
string names to metric values (also scalars, or RatioMetrics).
max_global_norm: Maximum global norm to clip gradients to. Should be a
scalar, which will be broadcast automatically.
**optimizer_hyper_params: Hyperparameters to pass to the optimizer's
`apply_gradient` function, which will be broadcast across devices
automatically.
Returns:
Tuple (updated_optimizer, grads_ok, metrics). Metrics will be as returned by
loss_fn, with an extra elements "loss". All metrics will be averaged
across all elements of the batch. Both optimizer and metrics will contain
ShardedDeviceArrays that are identical across devices. grads_ok will be
a replicated bool ndarray that is True if the gradients were finite.
"""
def batched_loss_fn(model):
"""Apply loss function across a batch of examples."""
loss, metrics = jax.vmap(loss_fn, (None, 0, None))(model, batched_examples,
static_batch_metadata)
return jnp.mean(loss), metrics
# Compute gradients of loss, along with metrics.
(loss, metrics), grads = jax.value_and_grad(
batched_loss_fn, has_aux=True)(
optimizer.target)
metrics["loss"] = loss
# Exchange average gradients and metrics across devices.
agg_grads = jax.lax.pmean(grads, "devices")
agg_metrics = {}
for k, v in metrics.items():
if isinstance(v, RatioMetric):
num = jax.lax.psum(jnp.sum(v.numerator), "devices")
denom = jax.lax.psum(jnp.sum(v.denominator), "devices")
new_value = num / denom
else:
# Use nanmean to aggregate bare floats.
new_value = jnp.nanmean(jax.lax.all_gather(v, "devices"))
agg_metrics[k] = new_value
# Compute global norm and possibly clip.
global_norm = optax.global_norm(agg_grads)
agg_metrics["gradient_global_norm"] = global_norm
if max_global_norm is not None:
should_clip = global_norm > max_global_norm
agg_grads = jax.tree_map(
lambda g: jnp.where(should_clip, g * max_global_norm / global_norm, g),
agg_grads)
agg_metrics["gradient_was_clipped"] = should_clip.astype("float32")
# Check for non-finite gradients.
grads_ok = jnp.all(
jnp.stack([jnp.all(jnp.isfinite(x)) for x in jax.tree_leaves(agg_grads)]))
# Apply updates.
updated_optimizer = optimizer.apply_gradient(agg_grads,
**optimizer_hyper_params)
return updated_optimizer, grads_ok, agg_metrics, agg_grads
def _build_parallel_train_step():
"""Builds an accelerated version of the train step function."""
# We need to wrap and unwrap so that the final function can be called with
# keyword arguments, but we still maintain the proper axes.
@functools.partial(
jax.pmap,
axis_name="devices",
in_axes=(0, 0, None, None, None, None),
static_broadcasted_argnums=(2, 3))
def wrapped(optimizer, batched_examples, static_batch_metadata, loss_fn,
max_global_norm, optimizer_hyper_params):
return _parallel_train_step(optimizer, batched_examples,
static_batch_metadata, loss_fn, max_global_norm,
**optimizer_hyper_params)
@functools.wraps(_parallel_train_step)
def wrapper(optimizer, batched_examples, static_batch_metadata, loss_fn,
max_global_norm, **optimizer_hyper_params):
return wrapped(optimizer, batched_examples, static_batch_metadata, loss_fn,
max_global_norm, optimizer_hyper_params)
return wrapper
# The primary version of the training step, with the associated jit cache.
parallel_train_step = _build_parallel_train_step()
def warmup_train_step(
optimizer,
batched_example,
static_batch_metadata,
loss_fn,
optimizer_is_replicated = False,
profile = False,
runner=None,
):
"""Run a fake train step to warm up JIT cache.
Args:
optimizer: Optimizer that tracks the model and parameter state.
batched_example: A structure of NDArrays representing a batch of examples.
static_batch_metadata: Metadata about the batch, which will be shared across
all batched examples.
loss_fn: Task-specific non-batched loss function to apply. Should take the
current model (optimizer.target) and an example from batched_examples, and
return a tuple of the current loss (as a scalar) and a dictionary from
string names to metric values (also scalars).
optimizer_is_replicated: Whether optimizer is already replicated.
profile: Whether to enable profiling during warmup.
runner: If profile=True, the runner to use when profiling.
"""
num_devices = jax.local_device_count()
if optimizer_is_replicated:
replicated_optimizer = optimizer
else:
replicated_optimizer = device_broadcast(optimizer, num_devices)
(replicated_optimizer,
batched_example) = jax.tree_map(jax.device_put,
(replicated_optimizer, batched_example))
try:
max_global_norm = gin.query_parameter(
"train_util.training_loop.max_global_norm")
except ValueError:
max_global_norm = None
def go():
# Note that value for learning_rate is arbitrary, but we pass it here to
# warm up the jit cache (since we are passing a learning rate at training
# time).
res = parallel_train_step(
replicated_optimizer,
batched_example,
static_batch_metadata,
loss_fn,
max_global_norm=max_global_norm,
learning_rate=0.0)
jax.tree_map(lambda x: x.block_until_ready(), res)
if profile:
stats = runner.try_run_and_profile(go, catch_resource_exhausted=False)
logging.info("Warmed up train step with stats: %s", stats)
else:
go()
logging.info("Warmed up train step")
def build_averaging_validator(
loss_fn,
valid_iterator_factory,
objective_metric_name = None,
include_total_counts = False,
prefetch = True,
):
"""Validate by computing averages over a validation set.
Args:
loss_fn: Loss function for the task.
valid_iterator_factory: Constructs iterators of batched examples from the
validation set, with two batch axes. To iterate over a fixed part of the
validation set, consider using build_one_pass_iterator_factory. To
randomly sample from a validation set, you can use something like
`lambda: itertools.islice(validation_iterator, num_batches)`.
objective_metric_name: Name of the metric that is the objective value.
include_total_counts: Whether to report numerator and denominator separately
for RatioMetric objects, along with the "validation_total_example_count"
metric.
prefetch: Whether to prefetch validation examples.
Returns:
Validation function that runs loss_fn and aggregates the results, reporting
the loss as the objective, and using sum to accumulate metrics.
"""
if objective_metric_name is None:
objective_metric_name = "loss"
@functools.partial(
jax.pmap, axis_name="devices", static_broadcasted_argnums=3)
def parallel_metrics_batch(model, batched_examples, batch_mask,
static_metadata):
loss, metrics = jax.vmap(loss_fn, (None, 0, None))(model, batched_examples,
static_metadata)
metrics["loss"] = loss
metrics = jax.tree_map(
lambda x: jnp.where(batch_mask, x, jnp.zeros_like(x)), metrics)
metrics = jax.tree_map(lambda x: jax.lax.psum(jnp.sum(x), "devices"),
metrics)
return metrics
def validation_function(model):
with contextlib.ExitStack() as exit_stack:
valid_iterator = valid_iterator_factory()
if prefetch:
valid_iterator = exit_stack.enter_context(
data_loading.ThreadedPrefetcher(valid_iterator, 4))
accumulated = None
example_count = 0
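      # Accumulate summed metrics across batches; example_count tracks only
      # real (unmasked) examples for the final averages.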
for batch in valid_iterator:
results = parallel_metrics_batch(model, batch.example, batch.mask,
batch.static_metadata)
metrics = jax.tree_map(float, flax.jax_utils.unreplicate(results))
metrics["epoch"] = np.sum(batch.epoch)
if accumulated is None:
accumulated = metrics
else:
accumulated = jax.tree_multimap(operator.add, accumulated, metrics)
example_count += jnp.count_nonzero(batch.mask)
assert example_count > 0, "Validation iterator must be nonempty"
accumulated = typing.cast(Dict[str, Any], accumulated)
final_metrics = {}
for k, v in accumulated.items():
if isinstance(v, RatioMetric):
final_metrics[k] = v.numerator / v.denominator
if include_total_counts:
final_metrics[k + "_numerator"] = v.numerator
final_metrics[k + "_denominator"] = v.denominator
else:
final_metrics[k] = v / example_count
objective = final_metrics[objective_metric_name]
if include_total_counts:
final_metrics["validation_total_example_count"] = example_count
return (objective, final_metrics)
return validation_function
@contextlib.contextmanager
def catch_interrupts_once(callback,
catch_signals = (signal.SIGINT,
signal.SIGABRT)):
# pylint: disable=g-doc-return-or-yield
"""Context manager to catch interrupt signals.
Only catches the first signal sent, so that repeated interrupts will kill the
job as normal.
Args:
callback: Function to run when the signal is caught the first time.
catch_signals: Signals to catch.
Returns:
A context manager that will catch interrupts inside the block.
"""
# pylint: enable=g-doc-return-or-yield
known_signals = {
signal.SIGINT: "SIGINT",
signal.SIGABRT: "SIGABRT",
}
def _handler(signal_number, frame):
del frame # Unused.
logging.warning("Caught interrupt signal %s",
known_signals.get(signal_number, signal_number))
callback(signal_number)<|fim▁hole|> _restore_handlers()
original_handlers = {}
for signal_number in catch_signals:
original_handlers[signal_number] = signal.signal(signal_number, _handler)
already_restored = False
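  # Restore the original signal handlers exactly once, whether invoked from
  # the handler itself or from the `finally` block below.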
def _restore_handlers():
nonlocal already_restored
if already_restored:
return
else:
already_restored = True
for signal_number in catch_signals:
current_handler = signal.signal(signal_number,
original_handlers[signal_number])
if current_handler is not _handler:
logging.error(
"Unexpected active signal handler %s for %s; "
"expected the signal hander from "
"`catch_interrupts_once`! Restored to %s anyways.",
current_handler, known_signals.get(signal_number, signal_number),
original_handlers[signal_number])
try:
yield
finally:
_restore_handlers()<|fim▁end|> | |
<|file_name|>select_handler.rs<|end_file_name|><|fim▁begin|>/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
///
/// Implementation of the callbacks that the CSS selector engine uses to query the DOM.
///
use dom::node::AbstractNode;
use newcss::select::SelectHandler;
use core::str::eq_slice;
pub struct NodeSelectHandler {
node: AbstractNode
}
fn with_node_name<R>(node: AbstractNode, f: &fn(&str) -> R) -> R {
if !node.is_element() {
fail!(~"attempting to style non-element node");
}
do node.with_imm_element |element_n| {
f(element_n.tag_name)
}
}
impl SelectHandler<AbstractNode> for NodeSelectHandler {
fn with_node_name<R>(&self, node: &AbstractNode, f: &fn(&str) -> R) -> R {
with_node_name(*node, f)
}
fn named_parent_node(&self, node: &AbstractNode, name: &str) -> Option<AbstractNode> {
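        // Return the parent node only if it is an element whose tag name matches `name`.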
match node.parent_node() {
Some(parent) => {
do with_node_name(parent) |node_name| {
if eq_slice(name, node_name) {
Some(parent)
} else {
None
}
}
}
None => None
}
}
fn parent_node(&self, node: &AbstractNode) -> Option<AbstractNode> {<|fim▁hole|>
// TODO: Use a Bloom filter.
fn named_ancestor_node(&self, node: &AbstractNode, name: &str) -> Option<AbstractNode> {
let mut node = *node;
loop {
let parent = node.parent_node();
match parent {
Some(parent) => {
let mut found = false;
do with_node_name(parent) |node_name| {
if eq_slice(name, node_name) {
found = true;
}
}
if found {
return Some(parent);
}
node = parent;
}
None => return None
}
}
}
fn node_is_root(&self, node: &AbstractNode) -> bool {
self.parent_node(node).is_none()
}
fn with_node_id<R>(&self, node: &AbstractNode, f: &fn(Option<&str>) -> R) -> R {
if !node.is_element() {
fail!(~"attempting to style non-element node");
}
do node.with_imm_element() |element_n| {
f(element_n.get_attr("id"))
}
}
fn node_has_id(&self, node: &AbstractNode, id: &str) -> bool {
if !node.is_element() {
fail!(~"attempting to style non-element node");
}
do node.with_imm_element |element_n| {
match element_n.get_attr("id") {
None => false,
Some(existing_id) => id == existing_id
}
}
}
}<|fim▁end|> | node.parent_node()
} |
<|file_name|>domparser.rs<|end_file_name|><|fim▁begin|>/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
use dom::bindings::codegen::Bindings::DocumentBinding::DocumentReadyState;
use dom::bindings::codegen::Bindings::DOMParserBinding;
use dom::bindings::codegen::Bindings::DOMParserBinding::DOMParserMethods;
use dom::bindings::codegen::Bindings::DOMParserBinding::SupportedType::{Text_html, Text_xml};
use dom::bindings::error::Fallible;
use dom::bindings::global::GlobalRef;
use dom::bindings::js::{JS, JSRef, Temporary};
use dom::bindings::utils::{Reflector, reflect_dom_object};
use dom::document::{Document, DocumentHelpers, IsHTMLDocument};<|fim▁hole|>use parse::html::{HTMLInput, parse_html};
use util::str::DOMString;
use std::borrow::ToOwned;
#[dom_struct]
pub struct DOMParser {
reflector_: Reflector,
window: JS<Window>, //XXXjdm Document instead?
}
impl DOMParser {
fn new_inherited(window: JSRef<Window>) -> DOMParser {
DOMParser {
reflector_: Reflector::new(),
window: JS::from_rooted(window),
}
}
pub fn new(window: JSRef<Window>) -> Temporary<DOMParser> {
reflect_dom_object(box DOMParser::new_inherited(window), GlobalRef::Window(window),
DOMParserBinding::Wrap)
}
pub fn Constructor(global: GlobalRef) -> Fallible<Temporary<DOMParser>> {
Ok(DOMParser::new(global.as_window()))
}
}
impl<'a> DOMParserMethods for JSRef<'a, DOMParser> {
// http://domparsing.spec.whatwg.org/#the-domparser-interface
fn ParseFromString(self,
s: DOMString,
ty: DOMParserBinding::SupportedType)
-> Fallible<Temporary<Document>> {
let window = self.window.root();
let url = window.r().get_url();
let content_type = DOMParserBinding::SupportedTypeValues::strings[ty as usize].to_owned();
match ty {
Text_html => {
let document = Document::new(window.r(), Some(url.clone()),
IsHTMLDocument::HTMLDocument,
Some(content_type),
None,
DocumentSource::FromParser).root();
parse_html(document.r(), HTMLInput::InputString(s), &url);
document.r().set_ready_state(DocumentReadyState::Complete);
Ok(Temporary::from_rooted(document.r()))
}
Text_xml => {
//FIXME: this should probably be FromParser when we actually parse the string (#3756).
Ok(Document::new(window.r(), Some(url.clone()),
IsHTMLDocument::NonHTMLDocument,
Some(content_type),
None,
DocumentSource::NotFromParser))
}
}
}
}<|fim▁end|> | use dom::document::DocumentSource;
use dom::window::{Window, WindowHelpers}; |
<|file_name|>test_scrooge_gen_integration.py<|end_file_name|><|fim▁begin|># coding=utf-8
# Copyright 2014 Pants project contributors (see CONTRIBUTORS.md).<|fim▁hole|>
from __future__ import (absolute_import, division, generators, nested_scopes, print_function,
unicode_literals, with_statement)
from pants_test.pants_run_integration_test import PantsRunIntegrationTest
class ScroogeGenTest(PantsRunIntegrationTest):
@classmethod
def hermetic(cls):
return True
def run_pants(self, command, config=None, stdin_data=None, extra_env=None, **kwargs):
full_config = {
'GLOBAL': {
'pythonpath': ["%(buildroot)s/contrib/scrooge/src/python"],
'backend_packages': ["pants.backend.codegen", "pants.backend.jvm", "pants.contrib.scrooge"]
},
}
if config:
for scope, scoped_cfgs in config.items():
updated = full_config.get(scope, {})
updated.update(scoped_cfgs)
full_config[scope] = updated
return super(ScroogeGenTest, self).run_pants(command, full_config, stdin_data, extra_env,
**kwargs)
@staticmethod
def thrift_test_target(name):
return 'contrib/scrooge/tests/thrift/org/pantsbuild/contrib/scrooge/scrooge_gen:' + name
def test_good(self):
# scrooge_gen should pass with correct thrift files.
cmd = ['gen', self.thrift_test_target('good-thrift')]
pants_run = self.run_pants(cmd)
self.assert_success(pants_run)
def test_namespace_map(self):
# scrooge_gen should pass with namespace_map specified
cmd = ['gen', self.thrift_test_target('namespace-map-thrift')]
pants_run = self.run_pants(cmd)
self.assert_success(pants_run)
def test_default_java_namespace(self):
# scrooge_gen should pass with default_java_namespace specified
cmd = ['gen', self.thrift_test_target('default-java-namespace-thrift')]
pants_run = self.run_pants(cmd)
self.assert_success(pants_run)
def test_include_paths(self):
# scrooge_gen should pass with include_paths specified
cmd = ['gen', self.thrift_test_target('include-paths-thrift')]
pants_run = self.run_pants(cmd)
self.assert_success(pants_run)<|fim▁end|> | # Licensed under the Apache License, Version 2.0 (see LICENSE). |
<|file_name|>game.cpp<|end_file_name|><|fim▁begin|>/******************************************************************************
* AUTHOR: Alexander Casal
* FILE: game.cpp
* DESCRIPTION: Demonstrate the use of the Strategy Design Pattern through
* a very simple game which allows the user to select different
* weapons. Each weapon has a different characteristic, changing
* the way the player attacks.
*****************************************************************************/
#include "game.h"
#include "largesword.h"
#include "smallsword.h"
#include "bow.h"
#include <memory>
#include <limits>
#include <iostream>
/**
* Constructor
*
* When we create the game we initialize playing to true so our game
* loop runs.
*/
Game::Game()
{
playing = true;
}
/**
* play
*
* Control the game loop allowing the player to select various
* options. The function changeWeapon allows us to use the strategy
* pattern to change which strategy we are using at runtime.
*/
void Game::play()
{
std::cout << "Welcome to the Strategy Pattern Game!\n";
showOptions();
while (playing)
{
switch (getOption())
{
case 1:
player.changeWeapon(std::make_unique<LargeSword>());
break;
case 2:
player.changeWeapon(std::make_unique<SmallSword>());
break;
case 3:
player.changeWeapon(std::make_unique<Bow>());
break;
case 4:
player.attack();
break;
case 5:
playing = false;
break;<|fim▁hole|> }
}
}
/**
* getOption
*
* Prompt the user to enter an option and retrieve it. We do some simple
* error checking to validate the user has entered a value between 1 and 5
* inclusive.
*/
int Game::getOption()
{
bool valid = false;
int input = 0;
do
{
std::cout << "> ";
// Validate the user has entered valid data
if (std::cin >> input)
{
valid = true;
if (input <= 0 || input > 5)
{
std::cout << "Please enter an option between 1 and 5\n";
valid = false;
}
}
else
{
std::cin.clear();
std::cin.ignore(std::numeric_limits<std::streamsize>::max(), '\n');
std::cout << "Plese enter an option between 1 and 5\n";
}
} while (!valid);
return input;
}
/**
* showOptions
*
* Display a list of possible options the user can input for their next
* action.
*/
void Game::showOptions()
{
std::cout << "\nPlease type an option from the menu (ex: > 4)\n";
std::cout << "1.) Select large sword\n2.) Select small sword\n";
std::cout << "3.) Select bow\n4.) Attack\n5.) Quit\n";
}<|fim▁end|> | |
<|file_name|>subtitle.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
import codecs
import logging
import os
import chardet
import pysrt
from .video import Episode, Movie
from .utils import sanitize, sanitize_release_group
logger = logging.getLogger(__name__)
#: Subtitle extensions
SUBTITLE_EXTENSIONS = ('.srt', '.sub', '.smi', '.txt', '.ssa', '.ass', '.mpl')
class Subtitle(object):
"""Base class for subtitle.
:param language: language of the subtitle.
:type language: :class:`~babelfish.language.Language`
:param bool hearing_impaired: whether or not the subtitle is hearing impaired.
:param page_link: URL of the web page from which the subtitle can be downloaded.
:type page_link: str
:param encoding: Text encoding of the subtitle.
:type encoding: str
"""
#: Name of the provider that returns that class of subtitle
provider_name = ''
def __init__(self, language, hearing_impaired=False, page_link=None, encoding=None):
#: Language of the subtitle
self.language = language
#: Whether or not the subtitle is hearing impaired
self.hearing_impaired = hearing_impaired
#: URL of the web page from which the subtitle can be downloaded
self.page_link = page_link
#: Content as bytes
self.content = None
#: Encoding to decode with when accessing :attr:`text`
self.encoding = None
# validate the encoding
if encoding:
try:
self.encoding = codecs.lookup(encoding).name
except (TypeError, LookupError):
logger.debug('Unsupported encoding %s', encoding)
@property
def id(self):
"""Unique identifier of the subtitle"""
raise NotImplementedError
@property
def text(self):
"""Content as string
If :attr:`encoding` is None, the encoding is guessed with :meth:`guess_encoding`
"""
if not self.content:
return
if self.encoding:
return self.content.decode(self.encoding, errors='replace')
return self.content.decode(self.guess_encoding(), errors='replace')
def is_valid(self):
"""Check if a :attr:`text` is a valid SubRip format.
:return: whether or not the subtitle is valid.
:rtype: bool
"""
if not self.text:
return False
try:
pysrt.from_string(self.text, error_handling=pysrt.ERROR_RAISE)
except pysrt.Error as e:
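            # Only fail validation when the parse error occurs within the
            # first 80 lines; later errors (e.g. truncated files) are tolerated.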
if e.args[0] < 80:
return False
return True
def guess_encoding(self):<|fim▁hole|> """Guess encoding using the language, falling back on chardet.
:return: the guessed encoding.
:rtype: str
"""
logger.info('Guessing encoding for language %s', self.language)
# always try utf-8 first
encodings = ['utf-8']
# add language-specific encodings
if self.language.alpha3 == 'zho':
encodings.extend(['gb18030', 'big5'])
elif self.language.alpha3 == 'jpn':
encodings.append('shift-jis')
elif self.language.alpha3 == 'ara':
encodings.append('windows-1256')
elif self.language.alpha3 == 'heb':
encodings.append('windows-1255')
elif self.language.alpha3 == 'tur':
encodings.extend(['iso-8859-9', 'windows-1254'])
elif self.language.alpha3 == 'pol':
# Eastern European Group 1
encodings.extend(['windows-1250'])
elif self.language.alpha3 == 'bul':
# Eastern European Group 2
encodings.extend(['windows-1251'])
else:
# Western European (windows-1252)
encodings.append('latin-1')
# try to decode
logger.debug('Trying encodings %r', encodings)
for encoding in encodings:
try:
self.content.decode(encoding)
except UnicodeDecodeError:
pass
else:
logger.info('Guessed encoding %s', encoding)
return encoding
logger.warning('Could not guess encoding from language')
# fallback on chardet
encoding = chardet.detect(self.content)['encoding']
logger.info('Chardet found encoding %s', encoding)
return encoding
def get_matches(self, video):
"""Get the matches against the `video`.
:param video: the video to get the matches with.
:type video: :class:`~subliminal.video.Video`
:return: matches of the subtitle.
:rtype: set
"""
raise NotImplementedError
def __hash__(self):
return hash(self.provider_name + '-' + self.id)
def __repr__(self):
return '<%s %r [%s]>' % (self.__class__.__name__, self.id, self.language)
def get_subtitle_path(video_path, language=None, extension='.srt'):
"""Get the subtitle path using the `video_path` and `language`.
:param str video_path: path to the video.
:param language: language of the subtitle to put in the path.
:type language: :class:`~babelfish.language.Language`
:param str extension: extension of the subtitle.
:return: path of the subtitle.
:rtype: str
"""
subtitle_root = os.path.splitext(video_path)[0]
if language:
subtitle_root += '.' + str(language)
return subtitle_root + extension
def guess_matches(video, guess, partial=False):
"""Get matches between a `video` and a `guess`.
    If a guess is `partial`, the absence of information won't be counted as a match.
:param video: the video.
:type video: :class:`~subliminal.video.Video`
:param guess: the guess.
:type guess: dict
:param bool partial: whether or not the guess is partial.
:return: matches between the `video` and the `guess`.
:rtype: set
"""
matches = set()
if isinstance(video, Episode):
# series
if video.series and 'title' in guess and sanitize(guess['title']) == sanitize(video.series):
matches.add('series')
# title
if video.title and 'episode_title' in guess and sanitize(guess['episode_title']) == sanitize(video.title):
matches.add('title')
# season
if video.season and 'season' in guess and guess['season'] == video.season:
matches.add('season')
# episode
if video.episode and 'episode' in guess and guess['episode'] == video.episode:
matches.add('episode')
# year
if video.year and 'year' in guess and guess['year'] == video.year:
matches.add('year')
# count "no year" as an information
if not partial and video.original_series and 'year' not in guess:
matches.add('year')
elif isinstance(video, Movie):
# year
if video.year and 'year' in guess and guess['year'] == video.year:
matches.add('year')
# title
if video.title and 'title' in guess and sanitize(guess['title']) == sanitize(video.title):
matches.add('title')
# release_group
if (video.release_group and 'release_group' in guess and
sanitize_release_group(guess['release_group']) == sanitize_release_group(video.release_group)):
matches.add('release_group')
# resolution
if video.resolution and 'screen_size' in guess and guess['screen_size'] == video.resolution:
matches.add('resolution')
# format
if video.format and 'format' in guess and guess['format'].lower() == video.format.lower():
matches.add('format')
# video_codec
if video.video_codec and 'video_codec' in guess and guess['video_codec'] == video.video_codec:
matches.add('video_codec')
# audio_codec
if video.audio_codec and 'audio_codec' in guess and guess['audio_codec'] == video.audio_codec:
matches.add('audio_codec')
return matches
def fix_line_ending(content):
"""Fix line ending of `content` by changing it to \n.
:param bytes content: content of the subtitle.
:return: the content with fixed line endings.
:rtype: bytes
"""
return content.replace(b'\r\n', b'\n').replace(b'\r', b'\n')<|fim▁end|> | |
<|file_name|>list.ts<|end_file_name|><|fim▁begin|>import { autoinject } from 'aurelia-framework';
import { Customers } from './customers';
import { Router } from 'aurelia-router';
@autoinject()
export class List {
heading = 'Customer management';
customerList = [];
customers: Customers;
router: Router;
constructor(customers: Customers, router: Router) {
this.customers = customers;
this.router = router;
}
gotoCustomer(customer: any) {
this.router.navigateToRoute('edit', {id: customer.id});
}
new() {
this.router.navigateToRoute('create');
}
activate() {
return this.customers.getAll()
.then(customerList => this.customerList = customerList);<|fim▁hole|> }
}<|fim▁end|> | |
<|file_name|>simplex_optim.py<|end_file_name|><|fim▁begin|>#! /usr/bin/env python
"""
Module with simplex (Nelder-Mead) optimization for defining the flux and
position of a companion using the Negative Fake Companion.
"""
from __future__ import print_function
import numpy as np
import matplotlib.pyplot as plt
from scipy.optimize import minimize
from .simplex_fmerit import chisquare
from ..var import frame_center
from ..conf import time_ini, timing, sep
__all__ = ['firstguess_from_coord',
'firstguess_simplex',
'firstguess']
def firstguess_from_coord(planet, center, cube, angs, PLSC, psf,
fwhm, annulus_width, aperture_radius, ncomp,
cube_ref=None, svd_mode='lapack', scaling=None,
fmerit='sum', collapse='median', f_range=None,
display=False, verbose=True, save=False, **kwargs):
"""
Determine a first guess for the flux of a companion at a given position
in the cube by doing a simple grid search evaluating the reduced chi2.
Parameters
----------
planet: numpy.array
The (x,y) position of the planet in the pca processed cube.
center: numpy.array
The (x,y) position of the cube center.
cube: numpy.array
The cube of fits images expressed as a numpy.array.
angs: numpy.array
The parallactic angle fits image expressed as a numpy.array.
PLSC: float
The platescale, in arcsec per pixel.
psf: numpy.array
The scaled psf expressed as a numpy.array.
fwhm : float
        The FWHM in pixels.
annulus_width: int, optional
The width in terms of the FWHM of the annulus on which the PCA is done.
aperture_radius: int, optional
The radius of the circular aperture in terms of the FWHM.
ncomp: int
The number of principal components.
cube_ref : array_like, 3d, optional
Reference library cube. For Reference Star Differential Imaging.
svd_mode : {'lapack', 'randsvd', 'eigen', 'arpack'}, str optional
Switch for different ways of computing the SVD and selected PCs.
scaling : {'temp-mean', 'temp-standard'} or None, optional
With None, no scaling is performed on the input data before SVD. With
"temp-mean" then temporal px-wise mean subtraction is done and with
"temp-standard" temporal mean centering plus scaling to unit variance
is done.
fmerit : {'sum', 'stddev'}, string optional
        Chooses the figure of merit to be used. stddev works better for close-in
companions sitting on top of speckle noise.
collapse : {'median', 'mean', 'sum', 'trimmean', None}, str or None, optional
Sets the way of collapsing the frames for producing a final image. If
None then the cube of residuals is used when measuring the function of
merit (instead of a single final frame).
f_range: numpy.array, optional
        The range of tested flux values. If None, 20 values between 0 and 5000
are tested.
display: boolean, optional
If True, the figure chi2 vs. flux is displayed.
verbose: boolean
If True, display intermediate info in the shell.
save: boolean, optional
If True, the figure chi2 vs. flux is saved.
kwargs: dict, optional
Additional parameters are passed to the matplotlib plot method.
Returns
-------
out : numpy.array
The radial coordinates and the flux of the companion.
"""
xy = planet-center
    r0 = np.sqrt(xy[0]**2 + xy[1]**2)
theta0 = np.mod(np.arctan2(xy[1],xy[0])/np.pi*180,360)
if f_range is not None:
n = f_range.shape[0]
else:
n = 20
f_range = np.linspace(0,5000,n)
chi2r = []
if verbose:
print('Step | flux | chi2r')
counter = 0
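    # Early stopping: abort the flux grid search once the chi2 has increased
    # four times, since the minimum has then almost certainly been passed.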
for j, f_guess in enumerate(f_range):
chi2r.append(chisquare((r0,theta0,f_guess), cube, angs, PLSC, psf,
fwhm, annulus_width, aperture_radius,(r0,theta0),
ncomp, cube_ref, svd_mode, scaling, fmerit,
collapse))
if chi2r[j] > chi2r[j-1]: counter+=1
if counter == 4: break
if verbose:
print('{}/{} {:.3f} {:.3f}'.format(j+1,n,f_guess,chi2r[j]))
chi2r = np.array(chi2r)
f0 = f_range[chi2r.argmin()]
if display:
plt.figure(figsize=kwargs.pop('figsize',(8,4)))
plt.title(kwargs.pop('title',''))
plt.xlim(f_range[0], f_range[:chi2r.shape[0]].max())
plt.ylim(chi2r.min()*0.9, chi2r.max()*1.1)
plt.plot(f_range[:chi2r.shape[0]],chi2r,
linestyle = kwargs.pop('linestyle','-'),
color = kwargs.pop('color','gray'),
marker = kwargs.pop('marker','.'),
markerfacecolor='r', markeredgecolor='r', **kwargs)
plt.xlabel('flux')
plt.ylabel(r'$\chi^2_{r}$')
plt.grid('on')
if save:
plt.savefig('chi2rVSflux.pdf')
if display:
plt.show()
return (r0,theta0,f0)
def firstguess_simplex(p, cube, angs, psf, plsc, ncomp, fwhm, annulus_width,
aperture_radius, cube_ref=None, svd_mode='lapack',
scaling=None, fmerit='sum', collapse='median', p_ini=None,
options=None, verbose=False, **kwargs):
"""
Determine the position of a companion using the negative fake companion
technique and a standard minimization algorithm (Default=Nelder-Mead) .
Parameters
----------
p : np.array
Estimate of the candidate position.
cube: numpy.array
The cube of fits images expressed as a numpy.array.
angs: numpy.array
The parallactic angle fits image expressed as a numpy.array.
psf: numpy.array
The scaled psf expressed as a numpy.array.
plsc: float
The platescale, in arcsec per pixel.
ncomp: int
The number of principal components.
fwhm : float
        The FWHM in pixels.
annulus_width: int, optional
The width in terms of the FWHM of the annulus on which the PCA is done.
aperture_radius: int, optional
The radius of the circular aperture in terms of the FWHM.
cube_ref : array_like, 3d, optional
Reference library cube. For Reference Star Differential Imaging.
svd_mode : {'lapack', 'randsvd', 'eigen', 'arpack'}, str optional
Switch for different ways of computing the SVD and selected PCs.
scaling : {'temp-mean', 'temp-standard'} or None, optional
With None, no scaling is performed on the input data before SVD. With
"temp-mean" then temporal px-wise mean subtraction is done and with
"temp-standard" temporal mean centering plus scaling to unit variance
is done.
fmerit : {'sum', 'stddev'}, string optional
        Chooses the figure of merit to be used. stddev works better for close-in
companions sitting on top of speckle noise.<|fim▁hole|> collapse : {'median', 'mean', 'sum', 'trimmean', None}, str or None, optional
Sets the way of collapsing the frames for producing a final image. If
None then the cube of residuals is used when measuring the function of
merit (instead of a single final frame).
p_ini : np.array
Position (r, theta) of the circular aperture center.
options: dict, optional
The scipy.optimize.minimize options.
verbose : boolean, optional
        If True, information is displayed in the shell.
Returns
-------
out : scipy.optimize.minimize solution object
The solution of the minimization algorithm.
"""
if verbose:
print('')
print('{} minimization is running...'.format(options.get('method','Nelder-Mead')))
if p_ini is None:
p_ini = p
solu = minimize(chisquare, p, args=(cube, angs, plsc, psf, fwhm, annulus_width,
aperture_radius, p_ini, ncomp, cube_ref,
svd_mode, scaling, fmerit, collapse),
method = options.pop('method','Nelder-Mead'),
options=options, **kwargs)
if verbose: print(solu)
return solu
def firstguess(cube, angs, psfn, ncomp, plsc, planets_xy_coord, fwhm=4,
annulus_width=3, aperture_radius=4, cube_ref=None,
svd_mode='lapack', scaling=None, fmerit='sum', collapse='median',
p_ini=None, f_range=None, simplex=True, simplex_options=None,
display=False, verbose=True, save=False, figure_options=None):
""" Determines a first guess for the position and the flux of a planet.
We process the cube without injecting any negative fake companion.
This leads to the visual detection of the planet(s). For each of them,
    one can estimate the (x,y) coordinates in pixels for the position of the
star, as well as the planet(s).
From the (x,y) coordinates in pixels for the star and planet(s), we can
estimate a preliminary guess for the position and flux for each planet
by using the method "firstguess_from_coord". The argument "f_range" allows
to indicate prior limits for the flux (optional, default: None).
    This step can be repeated to refine the preliminary guess for the flux.
We can go a step further by using a Simplex Nelder_Mead minimization to
estimate the first guess based on the preliminary guess.
Parameters
----------
cube: numpy.array
The cube of fits images expressed as a numpy.array.
angs: numpy.array
The parallactic angle fits image expressed as a numpy.array.
psfn: numpy.array
The centered and normalized (flux in a 1*FWHM aperture must equal 1)
PSF 2d-array.
ncomp: int
The number of principal components.
plsc: float
The platescale, in arcsec per pixel.
    planets_xy_coord: array or list
The list of (x,y) positions of the planets.
fwhm : float, optional
        The FWHM in pixels.
annulus_width: int, optional
The width in terms of the FWHM of the annulus on which the PCA is done.
aperture_radius: int, optional
The radius of the circular aperture in terms of the FWHM.
cube_ref : array_like, 3d, optional
Reference library cube. For Reference Star Differential Imaging.
svd_mode : {'lapack', 'randsvd', 'eigen', 'arpack'}, str optional
Switch for different ways of computing the SVD and selected PCs.
scaling : {'temp-mean', 'temp-standard'} or None, optional
With None, no scaling is performed on the input data before SVD. With
"temp-mean" then temporal px-wise mean subtraction is done and with
"temp-standard" temporal mean centering plus scaling to unit variance
is done.
fmerit : {'sum', 'stddev'}, string optional
Chooses the figure of merit to be used. stddev works better for close in
companions sitting on top of speckle noise.
collapse : {'median', 'mean', 'sum', 'trimmean', None}, str or None, optional
Sets the way of collapsing the frames for producing a final image. If
None then the cube of residuals is used when measuring the function of
merit (instead of a single final frame).
p_ini: numpy.array
Position (r, theta) of the circular aperture center.
f_range: numpy.array, optional
        The range of tested flux values. If None, 20 values between 0 and 5000
are tested.
simplex: boolean, optional
If True, the Nelder-Mead minimization is performed after the flux grid
search.
simplex_options: dict, optional
The scipy.optimize.minimize options.
display: boolean, optional
If True, the figure chi2 vs. flux is displayed.
verbose: boolean
If True, display intermediate info in the shell.
save: boolean, optional
If True, the figure chi2 vs. flux is saved.
figure_options: dict, optional
Additional parameters are passed to the matplotlib plot method.
Returns
-------
out : The radial coordinates and the flux of the companion.
WARNING: POLAR ANGLE IS NOT THE CONVENTIONAL NORTH-TO-EAST P.A.
"""
if verbose: start_time = time_ini()
if figure_options is None:
figure_options = {'color':'gray', 'marker':'.',
'title':r'$\chi^2_{r}$ vs flux'}
planets_xy_coord = np.array(planets_xy_coord)
n_planet = planets_xy_coord.shape[0]
center_xy_coord = np.array(frame_center(cube[0]))
if f_range is None:
f_range = np.linspace(0,5000,20)
if simplex_options is None:
simplex_options = {'xtol':1e-1, 'maxiter':500, 'maxfev':1000}
r_0 = np.zeros(n_planet)
theta_0 = np.zeros_like(r_0)
f_0 = np.zeros_like(r_0)
for index_planet in range(n_planet):
if verbose:
print('')
print(sep)
print(' Planet {} '.format(index_planet))
print(sep)
print('')
msg2 = 'Planet {}: flux estimation at the position [{},{}], running ...'
print(msg2.format(index_planet,planets_xy_coord[index_planet,0],
planets_xy_coord[index_planet,1]))
res_init = firstguess_from_coord(planets_xy_coord[index_planet],
center_xy_coord, cube, angs, plsc, psfn,
fwhm, annulus_width, aperture_radius,
ncomp, f_range=f_range,
cube_ref=cube_ref, svd_mode=svd_mode,
scaling=scaling, fmerit=fmerit,
collapse=collapse, display=display,
verbose=verbose, save=save,
**figure_options)
r_pre, theta_pre, f_pre = res_init
if verbose:
msg3 = 'Planet {}: preliminary guess: (r, theta, f)=({:.1f}, {:.1f}, {:.1f})'
print(msg3.format(index_planet,r_pre, theta_pre, f_pre))
if simplex:
if verbose:
msg4 = 'Planet {}: Simplex Nelder-Mead minimization, running ...'
print(msg4.format(index_planet))
res = firstguess_simplex((r_pre,theta_pre,f_pre), cube, angs, psfn,
plsc, ncomp, fwhm, annulus_width,
aperture_radius, cube_ref=cube_ref,
svd_mode=svd_mode, scaling=scaling,
fmerit=fmerit, collapse=collapse, p_ini=p_ini,
options=simplex_options, verbose=False)
r_0[index_planet], theta_0[index_planet], f_0[index_planet] = res.x
if verbose:
msg5 = 'Planet {}: Success: {}, nit: {}, nfev: {}, chi2r: {}'
print(msg5.format(index_planet,res.success,res.nit,res.nfev,
res.fun))
print('message: {}'.format(res.message))
else:
if verbose:
msg4bis = 'Planet {}: Simplex Nelder-Mead minimization skipped.'
print(msg4bis.format(index_planet))
r_0[index_planet] = r_pre
theta_0[index_planet] = theta_pre
f_0[index_planet] = f_pre
if verbose:
centy, centx = frame_center(cube[0])
posy = r_0 * np.sin(np.deg2rad(theta_0[index_planet])) + centy
posx = r_0 * np.cos(np.deg2rad(theta_0[index_planet])) + centx
msg6 = 'Planet {}: simplex result: (r, theta, f)=({:.3f}, {:.3f}'
msg6 += ', {:.3f}) at \n (X,Y)=({:.2f}, {:.2f})'
print(msg6.format(index_planet, r_0[index_planet],
theta_0[index_planet], f_0[index_planet], posx[0], posy[0]))
if verbose:
print('\n', sep, '\nDONE !\n', sep)
timing(start_time)
return (r_0,theta_0,f_0)<|fim▁end|> | |
<|file_name|>test_postgres.py<|end_file_name|><|fim▁begin|># Copyright Hybrid Logic Ltd. See LICENSE file for details.
"""
Tests for running and managing PostgreSQL with Flocker.
"""
from unittest import skipUnless
from uuid import uuid4
from pyrsistent import pmap, freeze, thaw
from twisted.python.filepath import FilePath
from twisted.trial.unittest import TestCase
from flocker.control import (
Application, DockerImage, AttachedVolume, Port, Dataset, Manifestation,
)
from flocker.testtools import loop_until
from .testtools import (assert_expected_deployment, flocker_deploy, get_nodes,
require_flocker_cli, require_moving_backend)
from ..testtools import REALISTIC_BLOCKDEVICE_SIZE
try:
from pg8000 import connect, InterfaceError, ProgrammingError
PG8000_INSTALLED = True
except ImportError:
PG8000_INSTALLED = False
POSTGRES_INTERNAL_PORT = 5432
POSTGRES_EXTERNAL_PORT = 5432
POSTGRES_APPLICATION_NAME = u"postgres-volume-example"
POSTGRES_IMAGE = u"postgres"
POSTGRES_VOLUME_MOUNTPOINT = u'/var/lib/postgresql/data'
POSTGRES_APPLICATION = Application(
name=POSTGRES_APPLICATION_NAME,
image=DockerImage.from_string(POSTGRES_IMAGE + u':latest'),
ports=frozenset([
Port(internal_port=POSTGRES_INTERNAL_PORT,
external_port=POSTGRES_EXTERNAL_PORT),
]),
volume=AttachedVolume(
manifestation=Manifestation(
dataset=Dataset(
dataset_id=unicode(uuid4()),
metadata=pmap({"name": POSTGRES_APPLICATION_NAME}),
maximum_size=REALISTIC_BLOCKDEVICE_SIZE),
primary=True),
mountpoint=FilePath(POSTGRES_VOLUME_MOUNTPOINT),
),
)
class PostgresTests(TestCase):
"""
Tests for running and managing PostgreSQL with Flocker.
"""
@require_flocker_cli
def setUp(self):
"""
Deploy PostgreSQL to a node.
"""
getting_nodes = get_nodes(self, num_nodes=2)
def deploy_postgres(node_ips):
self.node_1, self.node_2 = node_ips
postgres_deployment = {
u"version": 1,
u"nodes": {
self.node_1: [POSTGRES_APPLICATION_NAME],
self.node_2: [],
},
}
self.postgres_deployment_moved = {
u"version": 1,
u"nodes": {
self.node_1: [],
self.node_2: [POSTGRES_APPLICATION_NAME],
},
}
self.postgres_application = {
u"version": 1,
u"applications": {
POSTGRES_APPLICATION_NAME: {
u"image": POSTGRES_IMAGE,
u"ports": [{
u"internal": POSTGRES_INTERNAL_PORT,
u"external": POSTGRES_EXTERNAL_PORT,
}],
u"volume": {
u"dataset_id":
POSTGRES_APPLICATION.volume.dataset.dataset_id,
# The location within the container where the data
# volume will be mounted; see:
# https://github.com/docker-library/postgres/blob/
# docker/Dockerfile.template
u"mountpoint": POSTGRES_VOLUME_MOUNTPOINT,
u"maximum_size":
"%d" % (REALISTIC_BLOCKDEVICE_SIZE,),
},
},
},
}
self.postgres_application_different_port = thaw(freeze(
self.postgres_application).transform(
[u"applications", POSTGRES_APPLICATION_NAME, u"ports", 0,
u"external"], POSTGRES_EXTERNAL_PORT + 1))
flocker_deploy(self, postgres_deployment,
self.postgres_application)
getting_nodes.addCallback(deploy_postgres)
return getting_nodes
def test_deploy(self):
"""
Verify that Docker reports that PostgreSQL is running on one node and
not another.
"""
return assert_expected_deployment(self, {
self.node_1: set([POSTGRES_APPLICATION]),
self.node_2: set([]),
})
@require_moving_backend
def test_moving_postgres(self):
"""
It is possible to move PostgreSQL to a new node.
"""
flocker_deploy(self, self.postgres_deployment_moved,
self.postgres_application)
return assert_expected_deployment(self, {
self.node_1: set([]),
self.node_2: set([POSTGRES_APPLICATION]),
})
def _get_postgres_connection(self, host, user, port, database=None):
"""
Returns a ``Deferred`` which fires with a pg800 connection when one
has been created.
See http://pythonhosted.org//pg8000/dbapi.html#pg8000.connect for
parameter information.
"""
def connect_to_postgres():
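            # Called repeatedly by loop_until: keep polling until PostgreSQL
            # accepts the connection, returning False on transient errors.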
try:
return connect(host=host, user=user, port=port,<|fim▁hole|> except (InterfaceError, ProgrammingError):
return False
d = loop_until(connect_to_postgres)
return d
@skipUnless(PG8000_INSTALLED, "pg8000 not installed")
@require_moving_backend
def test_moving_postgres_data(self):
"""
PostgreSQL and its data can be deployed and moved with Flocker. In
particular, if PostgreSQL is deployed to a node, and data added to it,
and then the application is moved to another node, the data remains
available.
"""
database = b'flockertest'
user = b'postgres'
connecting_to_application = self._get_postgres_connection(
host=self.node_1,
user=user,
port=POSTGRES_EXTERNAL_PORT,
)
def create_database(connection_to_application):
connection_to_application.autocommit = True
application_cursor = connection_to_application.cursor()
application_cursor.execute("CREATE DATABASE flockertest;")
application_cursor.close()
connection_to_application.close()
connecting_to_application.addCallback(create_database)
def connect_to_database(ignored):
return self._get_postgres_connection(
host=self.node_1,
user=user,
port=POSTGRES_EXTERNAL_PORT,
database=database,
)
connecting_to_database = connecting_to_application.addCallback(
connect_to_database)
def add_data_node_1(db_connection_node_1):
db_node_1_cursor = db_connection_node_1.cursor()
db_node_1_cursor.execute(
"CREATE TABLE testtable (testcolumn int);")
db_node_1_cursor.execute(
"INSERT INTO testtable (testcolumn) VALUES (3);")
db_node_1_cursor.execute("SELECT * FROM testtable;")
db_connection_node_1.commit()
fetched_data = db_node_1_cursor.fetchone()[0]
db_node_1_cursor.close()
db_connection_node_1.close()
self.assertEqual(fetched_data, 3)
connecting_to_database.addCallback(add_data_node_1)
def get_postgres_node_2(ignored):
"""
Move PostgreSQL to ``node_2`` and return a ``Deferred`` which fires
with a connection to the previously created database on ``node_2``.
"""
flocker_deploy(self, self.postgres_deployment_moved,
self.postgres_application_different_port)
return self._get_postgres_connection(
host=self.node_2,
user=user,
port=POSTGRES_EXTERNAL_PORT + 1,
database=database,
)
getting_postgres_2 = connecting_to_database.addCallback(
get_postgres_node_2)
def verify_data_moves(db_connection_node_2):
db_node_2_cursor = db_connection_node_2.cursor()
db_node_2_cursor.execute("SELECT * FROM testtable;")
fetched_data = db_node_2_cursor.fetchone()[0]
db_node_2_cursor.close()
db_connection_node_2.close()
self.assertEqual(fetched_data, 3)
verifying_data_moves = getting_postgres_2.addCallback(
verify_data_moves)
return verifying_data_moves<|fim▁end|> | database=database) |
<|file_name|>processor.go<|end_file_name|><|fim▁begin|>package slack
import (
"encoding/json"
"fmt"
"log"
"regexp"
"strings"
)
const (
slackEventTypeMessage = "message"
maxMessageSize = 4000
maxMessageLines = 25
)
// Processor type processes inbound events from Slack
type Processor struct {
// Connection to Slack
con *Connection
// Slack user information relating to the bot account
self User
// a sequence number to uniquely identify sent messages and correlate with acks from Slack
sequence int
// map of event handler functions to handle types of Slack event
eventHandlers map[string]func(*Processor, map[string]interface{}, []byte)
// map of users who are members of the Slack group
users map[string]User
}
// event type represents an event sent to Slack e.g. messages
type event struct {
Id int `json:"id"`
Type string `json:"type"`
Channel string `json:"channel"`
Text string `json:"text"`
}
// user_change event type represents a user change event from Slack
type userChangeEvent struct {
Type string `json:"type"`
UpdatedUser User `json:"user"`
}
// send Event to Slack
func (p *Processor) sendEvent(eventType string, channel string, text string) error {
p.sequence++
response := &event{Id: p.sequence, Type: eventType, Channel: channel, Text: text}
responseJson, err := json.Marshal(response)
if err != nil {
return err
}
p.con.Write(responseJson)
return nil
}
/*
// Write the message on the specified channel to Slack
func (p *Processor) Write(channel string, text string) error {
for len(text) > 0 {
if len(text) <= maxMessageSize {
if err := p.sendEvent(slackEventTypeMessage, channel, text); err != nil {
return err
}
text = ""
} else {
// split message at a convenient place
maxSizeChunk := text[:maxMessageSize]
var breakIndex int
if lastLineBreak := strings.LastIndex(maxSizeChunk, "\n"); lastLineBreak > -1 {
breakIndex = lastLineBreak
} else if lastWordBreak := strings.LastIndexAny(maxSizeChunk, "\n\t .,/\\-(){}[]|=+*&"); lastWordBreak > -1 {
breakIndex = lastWordBreak
} else {
breakIndex = maxMessageSize
}
if err := p.sendEvent(slackEventTypeMessage, channel, text[:breakIndex]); err != nil {
return err
}
if breakIndex != maxMessageSize {
breakIndex++
}
text = text[breakIndex:]
}
}
return nil
}
*/
// Write the message on the specified channel to Slack
func (p *Processor) Write(channel string, text string) error {
for len(text) > 0 {
lines := strings.Count(text, "\n")
if len(text) <= maxMessageSize && lines <= maxMessageLines {
if err := p.sendEvent(slackEventTypeMessage, channel, text); err != nil {
return err
}
text = ""
} else {
// split message at a convenient place
var breakIndex int
maxSizeChunk := text
if len(text) > maxMessageSize {
				maxSizeChunk = text[:maxMessageSize]
lines = strings.Count(maxSizeChunk, "\n")
}
if lines > maxMessageLines {
var index int
for n := 0; index < len(maxSizeChunk) && n < maxMessageLines; n++ {
p := strings.Index(maxSizeChunk[index:], "\n")
if p == -1 {
break
}
index += p + 1
}
breakIndex = index
} else if lastLineBreak := strings.LastIndex(maxSizeChunk, "\n"); lastLineBreak > -1 {
breakIndex = lastLineBreak
} else if lastWordBreak := strings.LastIndexAny(maxSizeChunk, "\n\t .,/\\-(){}[]|=+*&"); lastWordBreak > -1 {
breakIndex = lastWordBreak
} else {
breakIndex = maxMessageSize
}
if err := p.sendEvent(slackEventTypeMessage, channel, text[:breakIndex]); err != nil {
return err
}
if breakIndex != maxMessageSize && lines <= maxMessageLines {
breakIndex++
}
text = text[breakIndex:]
}
}
return nil
}
// Start processing events from Slack
func (p *Processor) Start() {
for {
msg := p.con.Read()
log.Printf("%s", msg)
var data map[string]interface{}
err := json.Unmarshal(msg, &data)
if err != nil {
fmt.Printf("%T\n%s\n%#v\n", err, err, err)
switch v := err.(type) {
case *json.SyntaxError:
fmt.Println(string(msg[v.Offset-40 : v.Offset]))
}
log.Printf("%s", msg)
continue
}
		// if reply_to attribute is present the event is an ack for a sent message
_, isReply := data["reply_to"]
subtype, ok := data["subtype"]
var isMessageChangedEvent bool
if ok {
isMessageChangedEvent = (subtype.(string) == "message_changed" || subtype.(string) == "message_deleted")
}
if !isReply && !isMessageChangedEvent {
handler, ok := p.eventHandlers[data["type"].(string)]
if ok {
handler(p, data, msg)
}
}
}
}
// updateUser updates or adds (if not already existing) the specifed user
func (p *Processor) updateUser(user User) {
p.users[user.Id] = user
}
// onConnected is a callback for when the client connects (or reconnects) to Slack.
func (p *Processor) onConnected(con *Connection) {
p.self = con.config.Self
log.Printf("Connected to Slack as %s", p.self.Name)
p.users = make(map[string]User)
for _, user := range con.config.Users {
p.updateUser(user)
}
}
// type for callbacks to receive messages from Slack
type messageProcessor func(*Message)
// Starts processing events on the connection from Slack and passes any messages to the hear callback and only
// messages addressed to the bot to the respond callback
func EventProcessor(con *Connection, respond messageProcessor, hear messageProcessor) {
p := Processor{
con: con,
self: con.config.Self,
eventHandlers: map[string]func(*Processor, map[string]interface{}, []byte){
slackEventTypeMessage: func(p *Processor, event map[string]interface{}, rawEvent []byte) {
filterMessage(p, event, respond, hear)
},
"user_change": func(p *Processor, event map[string]interface{}, rawEvent []byte) {
var userEvent userChangeEvent
err := json.Unmarshal(rawEvent, &userEvent)
if err != nil {
fmt.Printf("%T\n%s\n%#v\n", err, err, err)
switch v := err.(type) {
case *json.SyntaxError:
fmt.Println(string(rawEvent[v.Offset-40 : v.Offset]))
}
log.Printf("%s", rawEvent)
}
p.updateUser(userEvent.UpdatedUser)
},
"hello": func(p *Processor, event map[string]interface{}, rawEvent []byte) {
p.onConnected(con)
},
"error": func(p *Processor, event map[string]interface{}, rawEvent []byte) {
log.Printf("Error received from Slack: %s", rawEvent)
},
},
users: make(map[string]User),
}
p.Start()
}
// Invoke one of the specified callbacks for the message if appropriate
func filterMessage(p *Processor, data map[string]interface{}, respond messageProcessor, hear messageProcessor) {
var userFullName string
var userId string
user, ok := data["user"]
if ok {
userId = user.(string)
user, exists := p.users[userId]
if exists {
userFullName = user.RealName
}
}
// process messages directed at Talbot
r, _ := regexp.Compile("^(<@" + p.self.Id + ">|@?" + p.self.Name + "):? (.+)")
text, ok := data["text"]
if !ok || text == nil {
return
}
matches := r.FindStringSubmatch(text.(string))
if len(matches) == 3 {
if respond != nil {
m := &Message{eventStream: p, responseStrategy: reply, Text: matches[2], From: userFullName, fromId: userId, channel: data["channel"].(string)}
respond(m)
}
} else if data["channel"].(string)[0] == 'D' {
if respond != nil {<|fim▁hole|> respond(m)
}
} else {
if hear != nil {
m := &Message{eventStream: p, responseStrategy: send, Text: text.(string), From: userFullName, fromId: userId, channel: data["channel"].(string)}
hear(m)
}
}
}<|fim▁end|> | // process direct messages
m := &Message{eventStream: p, responseStrategy: send, Text: text.(string), From: userFullName, fromId: userId, channel: data["channel"].(string)} |
<|file_name|>JMeterClientConnectionOperator.java<|end_file_name|><|fim▁begin|>/*
* ====================================================================
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with this
* work for additional information regarding copyright ownership. The ASF
* licenses this file to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance with the
* License. You may obtain a copy of the License at<|fim▁hole|> * http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations
* under the License.
* ====================================================================
*
* This software consists of voluntary contributions made by many
* individuals on behalf of the Apache Software Foundation. For more
* information on the Apache Software Foundation, please see
* <http://www.apache.org/>.
*/
package org.apache.jmeter.protocol.http.sampler;
import java.io.IOException;
import java.net.Socket;
import javax.net.ssl.SSLSocket;
import org.apache.http.HttpHost;
import org.apache.http.conn.DnsResolver;
import org.apache.http.conn.OperatedClientConnection;
import org.apache.http.conn.scheme.SchemeRegistry;
import org.apache.http.impl.conn.DefaultClientConnection;
import org.apache.http.impl.conn.DefaultClientConnectionOperator;
import org.apache.jmeter.util.HostNameSetter;
/**
* Custom implementation of {@link DefaultClientConnectionOperator} to fix SNI Issue
* @see "https://bz.apache.org/bugzilla/show_bug.cgi?id=57935"
* @since 3.0
* TODO Remove it when full upgrade to 4.5.X is done and cleanup is made in the Socket Factory of JMeter that handles client certificates and Slow socket
*/
public class JMeterClientConnectionOperator extends
DefaultClientConnectionOperator {
/**
* @param schemes
* the scheme registry
*/
public JMeterClientConnectionOperator(final SchemeRegistry schemes) {
super(schemes);
}
/**
* @param schemes
* the scheme registry
* @param dnsResolver
* the custom DNS lookup mechanism
*/
public JMeterClientConnectionOperator(final SchemeRegistry schemes,
final DnsResolver dnsResolver) {
super(schemes, dnsResolver);
}
@Override
public OperatedClientConnection createConnection() {
return new JMeterDefaultClientConnection();
}
private static class JMeterDefaultClientConnection extends DefaultClientConnection {
public JMeterDefaultClientConnection() {
super();
}
/* (non-Javadoc)
* @see org.apache.http.impl.conn.DefaultClientConnection#opening(java.net.Socket, org.apache.http.HttpHost)
*/
@Override
public void opening(Socket sock, HttpHost target) throws IOException {
super.opening(sock, target);
if(sock instanceof SSLSocket) {
HostNameSetter.setServerNameIndication(target.getHostName(), (SSLSocket) sock);
}
}
}
}<|fim▁end|> | * |
<|file_name|>TextOutput.js<|end_file_name|><|fim▁begin|>import {appendHtml, combine} from './../util';
const ELEMENT_NAMES = {
frameName: 'text-frame',
messageName: 'text-message',
indicatorName: 'text-indicator'
};
let createElements = (container, names) => {
const elements = '\
<div class="text-frame" id="' + names.frameName + '">\
<span class="text-message" id="' + names.messageName + '"></span>\
<span id="' + names.indicatorName + '">▼</span>\
</div>';
appendHtml(container, elements);
}
export default class TextOutput {
constructor(parent, engine) {
let elementNames = Object.assign(ELEMENT_NAMES, engine.overrides.customElementNames);
if (!engine.overrides.useCustomElements) {
createElements(parent, elementNames);
}
this._textMessages = [];
this.engine = engine;<|fim▁hole|> this.textMessage = document.getElementById(elementNames.messageName);
this.textIndicator = document.getElementById(elementNames.indicatorName)
this.textMessageFrame.onclick = () => engine.drawMessages();
engine.clearText = combine(engine.clearText, this.clearText.bind(this));
engine.displayText = combine(engine.displayText, this.displayText.bind(this));
engine.drawMessages = combine(engine.drawMessages, this.drawMessages.bind(this));
engine.actionExecutor.registerAction("text", (options, engine, player, callback) => {
engine.displayText(options.text.split("\n"));
}, false, true);
}
clearText () {
this._textMessages = [];
this.textMessageFrame.classList.remove("in");
this.textMessage.innerHTML = "";
this.textIndicator.classList.remove("in");
this.engine.unpause();
}
displayText (text) {
this._textMessages = this._textMessages.concat(text);
}
drawMessages () {
if (this._textMessages.length > 0) {
this.engine.pause();
const text = this._textMessages.splice(0, 1)[0];
this.textMessage.innerHTML = text;
if (!("in" in this.textMessageFrame.classList)) {
this.textMessageFrame.classList.add("in");
}
if (this._textMessages.length >= 1) {
this.textIndicator.classList.add("in");
} else {
this.textIndicator.classList.remove("in");
}
} else {
this.clearText();
}
}
}<|fim▁end|> |
this.textMessageFrame = document.getElementById(elementNames.frameName); |
<|file_name|>test_serialize.py<|end_file_name|><|fim▁begin|>import json
import time
import jps
class MessageHolder(object):
def __init__(self):
self.saved_msg = []
def __call__(self, msg):
self.saved_msg.append(msg)
def test_pubsub_with_serialize_json():
holder = MessageHolder()
sub = jps.Subscriber('/serialize_hoge1', holder,<|fim▁hole|> deserializer=json.loads)
pub = jps.Publisher('/serialize_hoge1',
serializer=json.dumps)
time.sleep(0.1)
obj = {'da1': 1, 'name': 'hoge'}
pub.publish(obj)
time.sleep(0.1)
sub.spin_once()
assert len(holder.saved_msg) == 1
assert holder.saved_msg[0]['da1'] == 1
assert holder.saved_msg[0]['name'] == 'hoge'<|fim▁end|> | |
<|file_name|>issue-54109-without-witness.rs<|end_file_name|><|fim▁begin|>// run-rustfix
// This test is to check if suggestions can be applied automatically.
#![allow(dead_code, unused_parens)]
fn main() {}
fn test_and() {
let a = true;
let b = false;
let _ = a and b; //~ ERROR `and` is not a logical operator
if a and b { //~ ERROR `and` is not a logical operator<|fim▁hole|>fn test_or() {
let a = true;
let b = false;
let _ = a or b; //~ ERROR `or` is not a logical operator
if a or b { //~ ERROR `or` is not a logical operator
println!("both");
}
}
fn test_and_par() {
let a = true;
let b = false;
if (a and b) { //~ ERROR `and` is not a logical operator
println!("both");
}
}
fn test_or_par() {
let a = true;
let b = false;
if (a or b) { //~ ERROR `or` is not a logical operator
println!("both");
}
}
fn test_while_and() {
let a = true;
let b = false;
while a and b { //~ ERROR `and` is not a logical operator
println!("both");
}
}
fn test_while_or() {
let a = true;
let b = false;
while a or b { //~ ERROR `or` is not a logical operator
println!("both");
}
}<|fim▁end|> | println!("both");
}
}
|
<|file_name|>logfwd.rs<|end_file_name|><|fim▁begin|>use libc::c_char;
use std::sync::Once;
use crate::raw;
/// Manually initialize logging.
///
/// It is optional to call this function, and safe to do so more than once.
pub fn init() {
static mut INIT: Once = Once::new();
unsafe {
INIT.call_once(|| {
init_impl();
});
}
}
#[cfg(feature = "log")]
fn init_impl() {
use log::LevelFilter;
// trace to our callback
unsafe {
raw::rs_trace_to(trace);
}
// determine log level
    // this is useful because if the configured level is not Debug we can
    // optimize librsync log calls
let level = match log::max_level() {
LevelFilter::Info => raw::RS_LOG_NOTICE,
LevelFilter::Debug | LevelFilter::Trace => raw::RS_LOG_DEBUG,
_ => raw::RS_LOG_WARNING,
};
unsafe {
raw::rs_trace_set_level(level);
}
}
#[cfg(feature = "log")]
extern "C" fn trace(level: raw::rs_loglevel, msg: *const c_char) {<|fim▁hole|> raw::RS_LOG_EMERG | raw::RS_LOG_ALERT | raw::RS_LOG_CRIT | raw::RS_LOG_ERR => Level::Error,
raw::RS_LOG_WARNING => Level::Warn,
raw::RS_LOG_NOTICE | raw::RS_LOG_INFO => Level::Info,
raw::RS_LOG_DEBUG => Level::Debug,
_ => Level::Error,
};
let msg = unsafe { CStr::from_ptr(msg).to_string_lossy() };
log!(target: "librsync", level, "{}", msg);
}
#[cfg(not(feature = "log"))]
fn init_impl() {
unsafe {
raw::rs_trace_to(trace);
raw::rs_trace_set_level(raw::RS_LOG_EMERG);
}
extern "C" fn trace(_level: raw::rs_loglevel, _msg: *const c_char) {}
}<|fim▁end|> | use log::Level;
use std::ffi::CStr;
let level = match level { |
<|file_name|>query-interface.js<|end_file_name|><|fim▁begin|>'use strict';
const Utils = require('./utils');
const _ = require('lodash');
const DataTypes = require('./data-types');
const SQLiteQueryInterface = require('./dialects/sqlite/query-interface');
const MSSSQLQueryInterface = require('./dialects/mssql/query-interface');
const MySQLQueryInterface = require('./dialects/mysql/query-interface');
const Transaction = require('./transaction');
const Promise = require('./promise');
const QueryTypes = require('./query-types');
/**
* The interface that Sequelize uses to talk to all databases
* @class QueryInterface
* @private
*/
class QueryInterface {
constructor(sequelize) {
this.sequelize = sequelize;
this.QueryGenerator = this.sequelize.dialect.QueryGenerator;
}
createSchema(schema, options) {
options = options || {};
const sql = this.QueryGenerator.createSchema(schema);
return this.sequelize.query(sql, options);
}
dropSchema(schema, options) {
options = options || {};
const sql = this.QueryGenerator.dropSchema(schema);
return this.sequelize.query(sql, options);
}
dropAllSchemas(options) {
options = options || {};
if (!this.QueryGenerator._dialect.supports.schemas) {
return this.sequelize.drop(options);
} else {
return this.showAllSchemas(options).map(schemaName => this.dropSchema(schemaName, options));
}
}
showAllSchemas(options) {
options = _.assign({}, options, {
raw: true,
type: this.sequelize.QueryTypes.SELECT
});
const showSchemasSql = this.QueryGenerator.showSchemasQuery();
return this.sequelize.query(showSchemasSql, options).then(schemaNames => Utils._.flatten(
Utils._.map(schemaNames, value => (!!value.schema_name ? value.schema_name : value))
));
}
databaseVersion(options) {
return this.sequelize.query(
this.QueryGenerator.versionQuery(),
_.assign({}, options, { type: QueryTypes.VERSION })
);
}
createTable(tableName, attributes, options, model) {
const keys = Object.keys(attributes);
const keyLen = keys.length;
let sql = '';
let i = 0;
options = _.clone(options) || {};
attributes = Utils._.mapValues(attributes, attribute => {
if (!Utils._.isPlainObject(attribute)) {
attribute = { type: attribute, allowNull: true };
}
attribute = this.sequelize.normalizeAttribute(attribute);
return attribute;
});
// Postgres requires a special SQL command for enums
if (this.sequelize.options.dialect === 'postgres') {
const promises = [];
for (i = 0; i < keyLen; i++) {
if (attributes[keys[i]].type instanceof DataTypes.ENUM) {
sql = this.QueryGenerator.pgListEnums(tableName, attributes[keys[i]].field || keys[i], options);
promises.push(this.sequelize.query(
sql,
_.assign({}, options, { plain: true, raw: true, type: QueryTypes.SELECT })
));
}
}
return Promise.all(promises).then(results => {
const promises = [];
let enumIdx = 0;
for (i = 0; i < keyLen; i++) {
if (attributes[keys[i]].type instanceof DataTypes.ENUM) {
// If the enum type doesn't exist then create it
if (!results[enumIdx]) {
sql = this.QueryGenerator.pgEnum(tableName, attributes[keys[i]].field || keys[i], attributes[keys[i]], options);
promises.push(this.sequelize.query(
sql,
_.assign({}, options, { raw: true })
));
} else if (!!results[enumIdx] && !!model) {
const enumVals = this.QueryGenerator.fromArray(results[enumIdx].enum_value);
const vals = model.rawAttributes[keys[i]].values;
vals.forEach((value, idx) => {
            // reset the before/after options since they are recomputed for every enum value
const valueOptions = _.clone(options);
valueOptions.before = null;
valueOptions.after = null;
if (enumVals.indexOf(value) === -1) {
if (!!vals[idx + 1]) {
valueOptions.before = vals[idx + 1];
}
else if (!!vals[idx - 1]) {
valueOptions.after = vals[idx - 1];
}
valueOptions.supportsSearchPath = false;
promises.push(this.sequelize.query(this.QueryGenerator.pgEnumAdd(tableName, keys[i], value, valueOptions), valueOptions));
}
});
enumIdx++;
}
}
}
if (!tableName.schema &&
(options.schema || (!!model && model._schema))) {
tableName = this.QueryGenerator.addSchema({
tableName,
_schema: (!!model && model._schema) || options.schema
});
}
attributes = this.QueryGenerator.attributesToSQL(attributes, {
context: 'createTable'
});
sql = this.QueryGenerator.createTableQuery(tableName, attributes, options);
return Promise.all(promises).then(() => {
return this.sequelize.query(sql, options);
});
});
} else {
if (!tableName.schema &&
(options.schema || (!!model && model._schema))) {
tableName = this.QueryGenerator.addSchema({
tableName,
_schema: (!!model && model._schema) || options.schema
});
}
attributes = this.QueryGenerator.attributesToSQL(attributes, {
context: 'createTable'
});
sql = this.QueryGenerator.createTableQuery(tableName, attributes, options);
return this.sequelize.query(sql, options);
}
}
dropTable(tableName, options) {
// if we're forcing we should be cascading unless explicitly stated otherwise
options = _.clone(options) || {};
options.cascade = options.cascade || options.force || false;
let sql = this.QueryGenerator.dropTableQuery(tableName, options);
return this.sequelize.query(sql, options).then(() => {
const promises = [];
      // Since postgres has a special case for enums, we should drop the related
      // enum type for each of the table's enum attributes
if (this.sequelize.options.dialect === 'postgres') {
const instanceTable = this.sequelize.modelManager.getModel(tableName, { attribute: 'tableName' });
if (!!instanceTable) {
const getTableName = (!options || !options.schema || options.schema === 'public' ? '' : options.schema + '_') + tableName;
const keys = Object.keys(instanceTable.rawAttributes);
const keyLen = keys.length;
for (let i = 0; i < keyLen; i++) {
if (instanceTable.rawAttributes[keys[i]].type instanceof DataTypes.ENUM) {
sql = this.QueryGenerator.pgEnumDrop(getTableName, keys[i]);
options.supportsSearchPath = false;
promises.push(this.sequelize.query(sql, _.assign({}, options, { raw: true })));
}
}
}
}
return Promise.all(promises).get(0);
});
}
dropAllTables(options) {
options = options || {};
const skip = options.skip || [];
const dropAllTables = tableNames => Promise.each(tableNames, tableName => {
      // Only drop the table if its name is not in the skip list
if (skip.indexOf(tableName.tableName || tableName) === -1) {
return this.dropTable(tableName, _.assign({}, options, { cascade: true }) );
}
});
return this.showAllTables(options).then(tableNames => {
if (this.sequelize.options.dialect === 'sqlite') {
return this.sequelize.query('PRAGMA foreign_keys;', options).then(result => {
const foreignKeysAreEnabled = result.foreign_keys === 1;
if (foreignKeysAreEnabled) {
return this.sequelize.query('PRAGMA foreign_keys = OFF', options)
.then(() => dropAllTables(tableNames))
.then(() => this.sequelize.query('PRAGMA foreign_keys = ON', options));
} else {
return dropAllTables(tableNames);
}
});
} else {
return this.getForeignKeysForTables(tableNames, options).then(foreignKeys => {
const promises = [];
tableNames.forEach(tableName => {
let normalizedTableName = tableName;
if (Utils._.isObject(tableName)) {
normalizedTableName = tableName.schema + '.' + tableName.tableName;
}
foreignKeys[normalizedTableName].forEach(foreignKey => {
const sql = this.QueryGenerator.dropForeignKeyQuery(tableName, foreignKey);
promises.push(this.sequelize.query(sql, options));
});
});
return Promise.all(promises).then(() => dropAllTables(tableNames));
});
}
});
}
dropAllEnums(options) {
if (this.sequelize.getDialect() !== 'postgres') {
return Promise.resolve();
}
options = options || {};
return this.pgListEnums(null, options).map(result => this.sequelize.query(
this.QueryGenerator.pgEnumDrop(null, null, this.QueryGenerator.pgEscapeAndQuote(result.enum_name)),
_.assign({}, options, { raw: true })
));
}
pgListEnums(tableName, options) {
options = options || {};
const sql = this.QueryGenerator.pgListEnums(tableName);
return this.sequelize.query(sql, _.assign({}, options, { plain: false, raw: true, type: QueryTypes.SELECT }));
}
renameTable(before, after, options) {
options = options || {};
const sql = this.QueryGenerator.renameTableQuery(before, after);
return this.sequelize.query(sql, options);
}
showAllTables(options) {
options = _.assign({}, options, {
raw: true,
type: QueryTypes.SHOWTABLES
});
let showTablesSql = null;
if (options.schema)
showTablesSql = this.QueryGenerator.showTablesQuery(options.schema);
else
showTablesSql = this.QueryGenerator.showTablesQuery();
return this.sequelize.query(showTablesSql, options).then(tableNames => Utils._.flatten(tableNames));
}
describeTable(tableName, options) {
let schema = null;
let schemaDelimiter = null;
if (typeof options === 'string') {
schema = options;
} else if (typeof options === 'object' && options !== null) {
schema = options.schema || null;
schemaDelimiter = options.schemaDelimiter || null;
}
if (typeof tableName === 'object' && tableName !== null) {
schema = tableName.schema;
tableName = tableName.tableName;
}
const sql = this.QueryGenerator.describeTableQuery(tableName, schema, schemaDelimiter);
return this.sequelize.query(
sql,
_.assign({}, options, { type: QueryTypes.DESCRIBE })
).then(data => {
// If no data is returned from the query, then the table name may be wrong.
// Query generators that use information_schema for retrieving table info will just return an empty result set,
      // it will not throw an error like built-in commands do (e.g. DESCRIBE on MySQL).
if (Utils._.isEmpty(data)) {
return Promise.reject('No description found for "' + tableName + '" table. Check the table name and schema; remember, they _are_ case sensitive.');
} else {
return Promise.resolve(data);
}
});
}
addColumn(table, key, attribute, options) {
if (!table || !key || !attribute) {
      throw new Error('addColumn takes at least 3 arguments (table, attribute name, attribute definition)');
}
options = options || {};
attribute = this.sequelize.normalizeAttribute(attribute);
return this.sequelize.query(this.QueryGenerator.addColumnQuery(table, key, attribute), options);
}
removeColumn(tableName, attributeName, options) {
options = options || {};
switch (this.sequelize.options.dialect) {
case 'sqlite':
// sqlite needs some special treatment as it cannot drop a column
return SQLiteQueryInterface.removeColumn.call(this, tableName, attributeName, options);
case 'mssql':
// mssql needs special treatment as it cannot drop a column with a default or foreign key constraint
        return MSSQLQueryInterface.removeColumn.call(this, tableName, attributeName, options);
case 'mysql':
// mysql needs special treatment as it cannot drop a column with a foreign key constraint
return MySQLQueryInterface.removeColumn.call(this, tableName, attributeName, options);
default:
return this.sequelize.query(this.QueryGenerator.removeColumnQuery(tableName, attributeName), options);
}
}
changeColumn(tableName, attributeName, dataTypeOrOptions, options) {
const attributes = {};
options = options || {};
if (Utils._.values(DataTypes).indexOf(dataTypeOrOptions) > -1) {
attributes[attributeName] = { type: dataTypeOrOptions, allowNull: true };
} else {
attributes[attributeName] = dataTypeOrOptions;
}
attributes[attributeName].type = this.sequelize.normalizeDataType(attributes[attributeName].type);
if (this.sequelize.options.dialect === 'sqlite') {
// sqlite needs some special treatment as it cannot change a column
return SQLiteQueryInterface.changeColumn.call(this, tableName, attributes, options);
} else {
const query = this.QueryGenerator.attributesToSQL(attributes);
const sql = this.QueryGenerator.changeColumnQuery(tableName, query);
return this.sequelize.query(sql, options);
}
}
renameColumn(tableName, attrNameBefore, attrNameAfter, options) {
options = options || {};
return this.describeTable(tableName, options).then(data => {
data = data[attrNameBefore] || {};
const _options = {};
_options[attrNameAfter] = {
attribute: attrNameAfter,
type: data.type,
allowNull: data.allowNull,
defaultValue: data.defaultValue
};
// fix: a not-null column cannot have null as default value
if (data.defaultValue === null && !data.allowNull) {
delete _options[attrNameAfter].defaultValue;
}
if (this.sequelize.options.dialect === 'sqlite') {
// sqlite needs some special treatment as it cannot rename a column
return SQLiteQueryInterface.renameColumn.call(this, tableName, attrNameBefore, attrNameAfter, options);
} else {
const sql = this.QueryGenerator.renameColumnQuery(
tableName,
attrNameBefore,
this.QueryGenerator.attributesToSQL(_options)
);
return this.sequelize.query(sql, options);
}
});
}
addIndex(tableName, attributes, options, rawTablename) {
// Support for passing tableName, attributes, options or tableName, options (with a fields param which is the attributes)
if (!Array.isArray(attributes)) {
rawTablename = options;
options = attributes;
attributes = options.fields;
}
// testhint argsConform.end
if (!rawTablename) {
// Map for backwards compat
rawTablename = tableName;
}
options = Utils.cloneDeep(options);
options.fields = attributes;
const sql = this.QueryGenerator.addIndexQuery(tableName, options, rawTablename);
return this.sequelize.query(sql, _.assign({}, options, { supportsSearchPath: false }));
}
showIndex(tableName, options) {
const sql = this.QueryGenerator.showIndexesQuery(tableName, options);
return this.sequelize.query(sql, _.assign({}, options, { type: QueryTypes.SHOWINDEXES }));
}
nameIndexes(indexes, rawTablename) {
return this.QueryGenerator.nameIndexes(indexes, rawTablename);
}
getForeignKeysForTables(tableNames, options) {
options = options || {};
if (tableNames.length === 0) {
return Promise.resolve({});
}
return Promise.map(tableNames, tableName =>
this.sequelize.query(this.QueryGenerator.getForeignKeysQuery(tableName, this.sequelize.config.database), options).get(0)
).then(results => {
const result = {};
tableNames.forEach((tableName, i) => {
if (Utils._.isObject(tableName)) {
tableName = tableName.schema + '.' + tableName.tableName;
}
result[tableName] = Utils._.compact(results[i]).map(r => r.constraint_name);
});
return result;
});
}
removeIndex(tableName, indexNameOrAttributes, options) {
options = options || {};
const sql = this.QueryGenerator.removeIndexQuery(tableName, indexNameOrAttributes);
return this.sequelize.query(sql, options);
}
insert(instance, tableName, values, options) {
options = Utils.cloneDeep(options);
options.hasTrigger = instance && instance.constructor.options.hasTrigger;
const sql = this.QueryGenerator.insertQuery(tableName, values, instance && instance.constructor.rawAttributes, options);
options.type = QueryTypes.INSERT;
options.instance = instance;
return this.sequelize.query(sql, options).then(results => {
if (instance) results[0].isNewRecord = false;
return results;
});
}
upsert(tableName, valuesByField, updateValues, where, model, options) {
const wheres = [];
const attributes = Object.keys(valuesByField);
let indexes = [];
let indexFields;
options = _.clone(options);
if (!Utils._.isEmpty(where)) {
wheres.push(where);
}
    // Let's combine unique keys and indexes into one list
indexes = Utils._.map(model.options.uniqueKeys, value => {
return value.fields;
});
Utils._.each(model.options.indexes, value => {
if (value.unique === true) {
        // Fields in the index may be either strings or objects with an attribute property - let's sanitize that
indexFields = Utils._.map(value.fields, field => {
if (Utils._.isPlainObject(field)) {
return field.attribute;
}
return field;
});
indexes.push(indexFields);
}
});
for (const index of indexes) {
if (Utils._.intersection(attributes, index).length === index.length) {
where = {};
for (const field of index) {
where[field] = valuesByField[field];
}
wheres.push(where);
}
}
where = { $or: wheres };
options.type = QueryTypes.UPSERT;
options.raw = true;
const sql = this.QueryGenerator.upsertQuery(tableName, valuesByField, updateValues, where, model.rawAttributes, options);
return this.sequelize.query(sql, options).then(rowCount => {
if (rowCount === undefined) {
return rowCount;
}
// MySQL returns 1 for inserted, 2 for updated http://dev.mysql.com/doc/refman/5.0/en/insert-on-duplicate.html. Postgres has been modded to do the same
return rowCount === 1;
});
}
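  // Illustrative reading of upsert's result (a sketch, not part of the original
  // source; `User`, `values`, `where` and `options` are assumed to be defined):
  //
  //   queryInterface.upsert(User.getTableName(), values, values, where, User, options)
  //     .then(created => {
  //       // `created` is true when a new row was inserted and false when an
  //       // existing row was updated (see the MySQL note above).
  //     });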
bulkInsert(tableName, records, options, attributes) {
options = _.clone(options) || {};
options.type = QueryTypes.INSERT;
const sql = this.QueryGenerator.bulkInsertQuery(tableName, records, options, attributes);
return this.sequelize.query(sql, options).then(results => results[0]);
}
update(instance, tableName, values, identifier, options) {
options = _.clone(options || {});
options.hasTrigger = !!(instance && instance._modelOptions && instance._modelOptions.hasTrigger);
const sql = this.QueryGenerator.updateQuery(tableName, values, identifier, options, instance.constructor.rawAttributes);
let restrict = false;
options.type = QueryTypes.UPDATE;
// Check for a restrict field
if (instance.constructor && instance.constructor.associations) {
const keys = Object.keys(instance.constructor.associations);
const length = keys.length;
for (let i = 0; i < length; i++) {
if (instance.constructor.associations[keys[i]].options && instance.constructor.associations[keys[i]].options.onUpdate && instance.constructor.associations[keys[i]].options.onUpdate === 'restrict') {
restrict = true;
}
}
}
options.instance = instance;
return this.sequelize.query(sql, options);
}
bulkUpdate(tableName, values, identifier, options, attributes) {
options = Utils.cloneDeep(options);
if (typeof identifier === 'object') identifier = Utils.cloneDeep(identifier);
const sql = this.QueryGenerator.updateQuery(tableName, values, identifier, options, attributes);
const table = Utils._.isObject(tableName) ? tableName : { tableName };
const model = Utils._.find(this.sequelize.modelManager.models, { tableName: table.tableName });
options.model = model;
return this.sequelize.query(sql, options);
}
delete(instance, tableName, identifier, options) {
const cascades = [];
const sql = this.QueryGenerator.deleteQuery(tableName, identifier, null, instance.constructor);
options = _.clone(options) || {};
// Check for a restrict field
if (!!instance.constructor && !!instance.constructor.associations) {
const keys = Object.keys(instance.constructor.associations);
const length = keys.length;
let association;
for (let i = 0; i < length; i++) {
association = instance.constructor.associations[keys[i]];
if (association.options && association.options.onDelete &&
association.options.onDelete.toLowerCase() === 'cascade' &&
association.options.useHooks === true) {
cascades.push(association.accessors.get);
}
}
}
return Promise.each(cascades, cascade => {
return instance[cascade](options).then(instances => {
// Check for hasOne relationship with non-existing associate ("has zero")
if (!instances) {
return Promise.resolve();
}
if (!Array.isArray(instances)) instances = [instances];
return Promise.each(instances, instance => instance.destroy(options));
});
}).then(() => {
options.instance = instance;
return this.sequelize.query(sql, options);
});
}
bulkDelete(tableName, identifier, options, model) {
options = Utils.cloneDeep(options);
options = _.defaults(options, {limit: null});
if (typeof identifier === 'object') identifier = Utils.cloneDeep(identifier);
const sql = this.QueryGenerator.deleteQuery(tableName, identifier, options, model);
return this.sequelize.query(sql, options);
}
select(model, tableName, options) {
options = Utils.cloneDeep(options);
options.type = QueryTypes.SELECT;
options.model = model;
return this.sequelize.query(
this.QueryGenerator.selectQuery(tableName, options, model),
options
);
}
increment(instance, tableName, values, identifier, options) {
const sql = this.QueryGenerator.arithmeticQuery('+', tableName, values, identifier, options.attributes);
options = _.clone(options) || {};
options.type = QueryTypes.UPDATE;
options.instance = instance;
return this.sequelize.query(sql, options);
}
decrement(instance, tableName, values, identifier, options) {
const sql = this.QueryGenerator.arithmeticQuery('-', tableName, values, identifier, options.attributes);
options = _.clone(options) || {};
options.type = QueryTypes.UPDATE;
options.instance = instance;
return this.sequelize.query(sql, options);
}
rawSelect(tableName, options, attributeSelector, Model) {
if (options.schema) {
tableName = this.QueryGenerator.addSchema({
tableName,
_schema: options.schema
});
}
options = Utils.cloneDeep(options);
options = _.defaults(options, {
raw: true,
plain: true,
type: QueryTypes.SELECT
});
const sql = this.QueryGenerator.selectQuery(tableName, options, Model);
if (attributeSelector === undefined) {
throw new Error('Please pass an attribute selector!');
}
return this.sequelize.query(sql, options).then(data => {
if (!options.plain) {
return data;
}
let result = data ? data[attributeSelector] : null;
if (options && options.dataType) {
const dataType = options.dataType;
if (dataType instanceof DataTypes.DECIMAL || dataType instanceof DataTypes.FLOAT) {
result = parseFloat(result);
} else if (dataType instanceof DataTypes.INTEGER || dataType instanceof DataTypes.BIGINT) {
result = parseInt(result, 10);
} else if (dataType instanceof DataTypes.DATE) {
if (!Utils._.isNull(result) && !Utils._.isDate(result)) {
result = new Date(result);
}
} else if (dataType instanceof DataTypes.STRING) {
// Nothing to do, result is already a string.
}
}
return result;
});
}
createTrigger(tableName, triggerName, timingType, fireOnArray, functionName, functionParams, optionsArray, options) {
const sql = this.QueryGenerator.createTrigger(tableName, triggerName, timingType, fireOnArray, functionName, functionParams, optionsArray);
options = options || {};
if (sql) {
return this.sequelize.query(sql, options);
} else {
return Promise.resolve();
}
}
dropTrigger(tableName, triggerName, options) {
const sql = this.QueryGenerator.dropTrigger(tableName, triggerName);
options = options || {};
if (sql) {
return this.sequelize.query(sql, options);
} else {
return Promise.resolve();
}
}
renameTrigger(tableName, oldTriggerName, newTriggerName, options) {
const sql = this.QueryGenerator.renameTrigger(tableName, oldTriggerName, newTriggerName);
options = options || {};
if (sql) {
return this.sequelize.query(sql, options);
} else {
return Promise.resolve();
}
}
createFunction(functionName, params, returnType, language, body, options) {
const sql = this.QueryGenerator.createFunction(functionName, params, returnType, language, body, options);
options = options || {};
if (sql) {
return this.sequelize.query(sql, options);
} else {
return Promise.resolve();
}
}
dropFunction(functionName, params, options) {
const sql = this.QueryGenerator.dropFunction(functionName, params);
options = options || {};
if (sql) {
return this.sequelize.query(sql, options);
} else {
return Promise.resolve();
}
}
renameFunction(oldFunctionName, params, newFunctionName, options) {
const sql = this.QueryGenerator.renameFunction(oldFunctionName, params, newFunctionName);
options = options || {};
if (sql) {
return this.sequelize.query(sql, options);
} else {
return Promise.resolve();
}
}
// Helper methods useful for querying
/**
* Escape an identifier (e.g. a table or attribute name). If force is true,
* the identifier will be quoted even if the `quoteIdentifiers` option is
* false.
* @private
*/
quoteIdentifier(identifier, force) {
return this.QueryGenerator.quoteIdentifier(identifier, force);
}
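  // For example (illustrative; the exact quoting depends on the active dialect):
  //
  //   queryInterface.quoteIdentifier('user');  // '"user"' on postgres, '`user`' on mysql
  //   queryInterface.quoteIdentifiers('public.user.name'); // quotes each dot-separated part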
quoteTable(identifier) {
return this.QueryGenerator.quoteTable(identifier);
}
/**
* Split an identifier into .-separated tokens and quote each part.
* If force is true, the identifier will be quoted even if the
* `quoteIdentifiers` option is false.
* @private
*/
quoteIdentifiers(identifiers, force) {
return this.QueryGenerator.quoteIdentifiers(identifiers, force);
}
/**
* Escape a value (e.g. a string, number or date)
* @private
*/
escape(value) {
return this.QueryGenerator.escape(value);
}
setAutocommit(transaction, value, options) {
if (!transaction || !(transaction instanceof Transaction)) {
throw new Error('Unable to set autocommit for a transaction without transaction object!');
}
if (transaction.parent) {
      // Not possible to set autocommit for savepoints (nested transactions)
return Promise.resolve();
}
options = _.assign({}, options, {
transaction: transaction.parent || transaction
});
const sql = this.QueryGenerator.setAutocommitQuery(value, {
parent: transaction.parent
});
if (!sql) return Promise.resolve();
return this.sequelize.query(sql, options);
}
setIsolationLevel(transaction, value, options) {
if (!transaction || !(transaction instanceof Transaction)) {
throw new Error('Unable to set isolation level for a transaction without transaction object!');
}
if (transaction.parent || !value) {
      // Not possible to set a separate isolation level for savepoints
return Promise.resolve();
}
options = _.assign({}, options, {
transaction: transaction.parent || transaction
});
const sql = this.QueryGenerator.setIsolationLevelQuery(value, {
parent: transaction.parent
});
if (!sql) return Promise.resolve();
return this.sequelize.query(sql, options);
}
startTransaction(transaction, options) {
if (!transaction || !(transaction instanceof Transaction)) {
throw new Error('Unable to start a transaction without transaction object!');
}
options = _.assign({}, options, {
transaction: transaction.parent || transaction
});
options.transaction.name = transaction.parent ? transaction.name : undefined;
const sql = this.QueryGenerator.startTransactionQuery(transaction);
return this.sequelize.query(sql, options);
}
<|fim▁hole|> options = _.assign({}, options, {
transaction: transaction.parent || transaction
});
const sql = this.QueryGenerator.deferConstraintsQuery(options);
if (sql) {
return this.sequelize.query(sql, options);
}
return Promise.resolve();
}
commitTransaction(transaction, options) {
if (!transaction || !(transaction instanceof Transaction)) {
throw new Error('Unable to commit a transaction without transaction object!');
}
if (transaction.parent) {
// Savepoints cannot be committed
return Promise.resolve();
}
options = _.assign({}, options, {
transaction: transaction.parent || transaction,
supportsSearchPath: false
});
const sql = this.QueryGenerator.commitTransactionQuery(transaction);
const promise = this.sequelize.query(sql, options);
transaction.finished = 'commit';
return promise;
}
rollbackTransaction(transaction, options) {
if (!transaction || !(transaction instanceof Transaction)) {
throw new Error('Unable to rollback a transaction without transaction object!');
}
options = _.assign({}, options, {
transaction: transaction.parent || transaction,
supportsSearchPath: false
});
options.transaction.name = transaction.parent ? transaction.name : undefined;
const sql = this.QueryGenerator.rollbackTransactionQuery(transaction);
const promise = this.sequelize.query(sql, options);
transaction.finished = 'rollback';
return promise;
}
}
module.exports = QueryInterface;
module.exports.QueryInterface = QueryInterface;
module.exports.default = QueryInterface;<|fim▁end|> | deferConstraints(transaction, options) { |
<|file_name|>persian.js<|end_file_name|><|fim▁begin|><|fim▁hole|>version https://git-lfs.github.com/spec/v1
oid sha256:ecdcfeb6d9c10e04bf103e6105698af7bda97180bfc3c7545cce74ce2a73f5eb
size 4291<|fim▁end|> | |
<|file_name|>test_parser.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
##
## This file is part of Invenio.
## Copyright (C) 2014, 2015 CERN.
##
## Invenio is free software; you can redistribute it and/or
## modify it under the terms of the GNU General Public License as
## published by the Free Software Foundation; either version 2 of the
## License, or (at your option) any later version.
##
## Invenio is distributed in the hope that it will be useful, but
## WITHOUT ANY WARRANTY; without even the implied warranty of
## MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
## General Public License for more details.
##
## You should have received a copy of the GNU General Public License
## along with Invenio; if not, write to the Free Software Foundation, Inc.,
## 59 Temple Place, Suite 330, Boston, MA 02111-1307, USA.
"""Unit tests for the parser engine."""
__revision__ = \
"$Id$"
import tempfile
from flask.ext.registry import PkgResourcesDirDiscoveryRegistry, \
ImportPathRegistry, RegistryProxy
from invenio.base.wrappers import lazy_import
from invenio.testsuite import make_test_suite, run_test_suite, InvenioTestCase
Field_parser = lazy_import('invenio.modules.jsonalchemy.parser:FieldParser')
Model_parser = lazy_import('invenio.modules.jsonalchemy.parser:ModelParser')
guess_legacy_field_names = lazy_import(
'invenio.modules.jsonalchemy.parser:guess_legacy_field_names')
get_producer_rules = lazy_import(
'invenio.modules.jsonalchemy.parser:get_producer_rules')
TEST_PACKAGE = 'invenio.modules.jsonalchemy.testsuite'
test_registry = RegistryProxy('testsuite', ImportPathRegistry,
initial=[TEST_PACKAGE])
field_definitions = lambda: PkgResourcesDirDiscoveryRegistry(
'fields', registry_namespace=test_registry)
model_definitions = lambda: PkgResourcesDirDiscoveryRegistry(
'models', registry_namespace=test_registry)
def clean_field_model_definitions():
Field_parser._field_definitions = {}
Field_parser._legacy_field_matchings = {}
Model_parser._model_definitions = {}
class TestParser(InvenioTestCase):
def setUp(self):
self.app.extensions['registry'][
'testsuite.fields'] = field_definitions()
self.app.extensions['registry'][
'testsuite.models'] = model_definitions()
def tearDown(self):
del self.app.extensions['registry']['testsuite.fields']
del self.app.extensions['registry']['testsuite.models']
def test_wrong_indent(self):
"""JSONAlchemy - wrong indent"""
from invenio.modules.jsonalchemy.parser import _create_field_parser
import pyparsing
parser = _create_field_parser()
test = """
foo:
creator:
bar, '1', foo()
"""
self.assertRaises(pyparsing.ParseException, parser.parseString, test)
from invenio.modules.jsonalchemy.errors import FieldParserException
tmp_file = tempfile.NamedTemporaryFile()
config = """
foo:
creator:
bar, '1', foo()
"""
tmp_file.write(config)
tmp_file.flush()
self.app.extensions['registry'][
'testsuite.fields'].register(tmp_file.name)
clean_field_model_definitions()
self.assertRaises(
FieldParserException, Field_parser.reparse, 'testsuite')
tmp_file.close()
clean_field_model_definitions()
def test_wrong_field_definitions(self):
"""JSONAlchemy - wrong field definitions"""
from invenio.modules.jsonalchemy.errors import FieldParserException
tmp_file_4 = tempfile.NamedTemporaryFile()
config_4 = '''
title:
creator:
marc, '245__', value
'''
tmp_file_4.write(config_4)
tmp_file_4.flush()
clean_field_model_definitions()
self.app.extensions['registry'][
'testsuite.fields'].register(tmp_file_4.name)
self.assertRaises(
FieldParserException, Field_parser.reparse, 'testsuite')
tmp_file_4.close()
clean_field_model_definitions()
def test_wrong_field_inheritance(self):
"""JSONAlchmey - not parent field definition"""
from invenio.modules.jsonalchemy.errors import FieldParserException
tmp_file_5 = tempfile.NamedTemporaryFile()
config_5 = '''
@extend
wrong_field:
""" Desc """
'''
tmp_file_5.write(config_5)
tmp_file_5.flush()
clean_field_model_definitions()
self.app.extensions['registry'][
'testsuite.fields'].register(tmp_file_5.name)
self.assertRaises(
FieldParserException, Field_parser.reparse, 'testsuite')
tmp_file_5.close()
clean_field_model_definitions()
def test_field_rules(self):
"""JsonAlchemy - field parser"""
self.assertTrue(len(Field_parser.field_definitions('testsuite')) >= 22)
# Check that all files are parsed
self.assertTrue(
'authors' in Field_parser.field_definitions('testsuite'))
self.assertTrue('title' in Field_parser.field_definitions('testsuite'))
        # Check the workaround for [n] and [0]
self.assertTrue(
Field_parser.field_definitions('testsuite')['doi']['pid'])
        # Check that derived and calculated fields are parsed correctly
self.assertTrue('dummy' in Field_parser.field_definitions('testsuite'))
self.assertEquals(
Field_parser.field_definitions('testsuite')['dummy']['pid'], 2)
self.assertEquals(Field_parser.field_definitions(
'testsuite')['dummy']['rules'].keys(), ['json', 'derived'])
self.assertTrue(
len(Field_parser.field_definitions(
'testsuite')['dummy']['producer']
),
2
)
self.assertTrue(Field_parser.field_definitions('testsuite')['_random'])
# Check override
value = {'a': 'a', 'b': 'b', 'k': 'k'} # noqa
self.assertEquals(
eval(Field_parser.field_definitions('testsuite')
['title']['rules']['marc'][1]['function']),
{'form': 'k', 'subtitle': 'b', 'title': 'a'})
# Check extras
self.assertTrue(
'json_ext' in
Field_parser.field_definitions('testsuite')['modification_date']
)
tmp = Field_parser.field_definitions('testsuite')
Field_parser.reparse('testsuite')
self.assertEquals(
len(Field_parser.field_definitions('testsuite')), len(tmp))
def test_field_hidden_decorator(self):
"""JsonAlchemy - field hidden decorator."""
# Check that all files are parsed
self.assertTrue(
'hidden_basic' in Field_parser.field_definitions('testsuite'))
# Check default hidden value
self.assertFalse(
Field_parser.field_definitions('testsuite')['_id']['hidden'])
# Check hidden field
self.assertTrue(Field_parser.field_definitions(
'testsuite')['hidden_basic']['hidden'])
def test_wrong_field_name_inside_model(self):
"""JSONAlchmey - wrong field name inside model"""
from invenio.modules.jsonalchemy.errors import ModelParserException
tmp_file_8 = tempfile.NamedTemporaryFile()
config_8 = '''
fields:
not_existing_field
'''
tmp_file_8.write(config_8)
tmp_file_8.flush()
clean_field_model_definitions()
self.app.extensions['registry'][
'testsuite.models'].register(tmp_file_8.name)
self.assertRaises(
ModelParserException, Model_parser.reparse, 'testsuite')
tmp_file_8.close()
clean_field_model_definitions()
def test_model_definitions(self):
"""JsonAlchemy - model parser"""
clean_field_model_definitions()
self.assertTrue(len(Model_parser.model_definitions('testsuite')) >= 2)
self.assertTrue(
'test_base' in Model_parser.model_definitions('testsuite'))
tmp = Model_parser.model_definitions('testsuite')
Model_parser.reparse('testsuite')
self.assertEquals(
len(Model_parser.model_definitions('testsuite')), len(tmp))
clean_field_model_definitions()
def test_resolve_several_models(self):
"""JSONAlchemy - test resolve several models"""
test_model = Model_parser.model_definitions('testsuite')['test_model']
clean_field_model_definitions()
self.assertEquals(
Model_parser.resolve_models('test_model', 'testsuite')['fields'],
test_model['fields'])<|fim▁hole|> clean_field_model_definitions()
def test_field_name_model_based(self):
"""JSONAlchemy - field name model based"""
clean_field_model_definitions()
field_model_def = Field_parser.field_definition_model_based(
'title', 'test_model', 'testsuite')
field_def = Field_parser.field_definitions('testsuite')['title_title']
value = {'a': 'Awesome title', 'b': 'sub title', 'k': 'form'}
from invenio.base.utils import try_to_eval
self.assertEqual(
try_to_eval(field_model_def['rules'][
'marc'][0]['function'], value=value),
try_to_eval(field_def['rules']['marc'][0]['function'],
value=value))
clean_field_model_definitions()
def test_guess_legacy_field_names(self):
"""JsonAlchemy - check legacy field names"""
self.assertEquals(
guess_legacy_field_names(('100__a', '245'), 'marc', 'testsuite'),
{'100__a': ['_first_author.full_name'], '245': ['title']})
self.assertEquals(
guess_legacy_field_names('foo', 'bar', 'baz'), {'foo': []})
def test_get_producer_rules(self):
"""JsonAlchemy - check producer rules"""
clean_field_model_definitions()
self.assertEquals(
len(get_producer_rules('keywords', 'json_for_marc', 'testsuite')),
1
)
self.assertRaises(
KeyError,
lambda: get_producer_rules('foo', 'json_for_marc', 'testsuite'))
clean_field_model_definitions()
TEST_SUITE = make_test_suite(TestParser)
if __name__ == '__main__':
run_test_suite(TEST_SUITE)<|fim▁end|> | self.assertEquals(
Model_parser.resolve_models(
['test_base', 'test_model'], 'testsuite')['fields'],
test_model['fields']) |
<|file_name|>app.interface.ts<|end_file_name|><|fim▁begin|>'use strict';
/**
* Express imports
*/
import { Application } from "express-serve-static-core";
<|fim▁hole|>
/**
* SubApp interface designed for all *Express classes,
* actually describing a sub-app, from the Express point of view.
* Make sure all these classes implement it
*/
export interface SubApp {
app: Application;
debug: debug.IDebugger;
init(): void;
configureEnv(): void;
configureLocals(): void;
configureMiddleware(): void;
configureRouting(): void;
configureErrorHandler(): void;
}
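// Illustrative implementation sketch (not part of the original source; the
// class name, the `express()` call and the `debugFactory` import are assumptions):
//
//   class ApiExpress implements SubApp {
//     app = express();
//     debug = debugFactory('app:api');
//     init(): void {
//       this.configureEnv();
//       this.configureLocals();
//       this.configureMiddleware();
//       this.configureRouting();
//       this.configureErrorHandler();
//     }
//     // ...remaining configure* methods
//   }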
/**
* App interface designed for the only *Express class,
* actually describing the main app, from the Express point of view.
* Make sure that class implements it
*/
export interface App extends SubApp {
viewsFolder: string;
configureViewEngine(): void;
configureDB(): void;
}<|fim▁end|> | /**
* Third-party imports ( https://www.npmjs.com/~types )
*/
import * as debug from "debug"; |
<|file_name|>qgoogleauthenticator.hpp<|end_file_name|><|fim▁begin|>#ifndef QGOOGLEAUTHENTICATOR_H
#define QGOOGLEAUTHENTICATOR_H<|fim▁hole|>#include <QDateTime>
#include <QtEndian>
#include "base32.h"
#include "hmac.h"
#define SECRET_LENGTH 16
#define VERIFICATION_CODE_MODULUS (1000*1000) // Six digits
class QGOOGLEAUTHENTICATORSHARED_EXPORT QGoogleAuthenticator
{
public:
static QString getCode(QByteArray secret, quint64 time = 0);
static bool checkCode(QString code, QByteArray secret);
static QByteArray generate_secret();
};
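// Illustrative usage (a sketch, not part of the original header; it assumes
// the default time argument of 0 means "use the current time"):
//
//   QByteArray secret = QGoogleAuthenticator::generate_secret();
//   QString code = QGoogleAuthenticator::getCode(secret);
//   bool ok = QGoogleAuthenticator::checkCode(code, secret); // expected: true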
#endif // QGOOGLEAUTHENTICATOR_H<|fim▁end|> |
#include "qgoogleauthenticator_global.h"
|
<|file_name|>dependency_loader.ts<|end_file_name|><|fim▁begin|>import {ScriptReceivers, ScriptReceiverFactory} from './script_receiver_factory';
import Runtime from 'runtime';
import ScriptRequest from './script_request';
/** Handles loading dependency files.<|fim▁hole|> * not. It is caller's responsibility to make sure the resource is not loaded
* twice. This is because it's impossible to detect resource loading status
* without knowing its content.
*
* Options:
 * - cdn_http - url to HTTP CDN
* - cdn_https - url to HTTPS CDN
* - version - version of pusher-js
* - suffix - suffix appended to all names of dependency files
*
* @param {Object} options
*/
export default class DependencyLoader {
options: any;
receivers: ScriptReceiverFactory;
loading: any;
constructor(options : any) {
this.options = options;
this.receivers = options.receivers || ScriptReceivers;
this.loading = {};
}
/** Loads the dependency from CDN.
*
* @param {String} name
* @param {Function} callback
*/
load(name : string, options: any, callback : Function) {
var self = this;
if (self.loading[name] && self.loading[name].length > 0) {
self.loading[name].push(callback);
} else {
self.loading[name] = [callback];
var request = Runtime.createScriptRequest(self.getPath(name, options));
var receiver = self.receivers.create(function(error) {
self.receivers.remove(receiver);
if (self.loading[name]) {
var callbacks = self.loading[name];
delete self.loading[name];
var successCallback = function(wasSuccessful) {
if (!wasSuccessful) {
request.cleanup();
}
};
for (var i = 0; i < callbacks.length; i++) {
callbacks[i](error, successCallback);
}
}
});
request.send(receiver);
}
}
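  // Illustrative usage (a sketch, not part of the original source; the option
  // values are assumptions, not real endpoints, and the dependency name is
  // hypothetical):
  //
  //   var loader = new DependencyLoader({
  //     cdn_http: "http://cdn.example.com",
  //     cdn_https: "https://cdn.example.com",
  //     version: "4.0",
  //     suffix: ""
  //   });
  //   loader.load("sockjs", {}, function(error, successCallback) {
  //     if (!error) successCallback(true);
  //   });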
/** Returns a root URL for pusher-js CDN.
*
* @returns {String}
*/
getRoot(options : any) : string {
var cdn;
var protocol = Runtime.getDocument().location.protocol;
if ((options && options.encrypted) || protocol === "https:") {
cdn = this.options.cdn_https;
} else {
cdn = this.options.cdn_http;
}
// make sure there are no double slashes
return cdn.replace(/\/*$/, "") + "/" + this.options.version;
}
/** Returns a full path to a dependency file.
*
* @param {String} name
* @returns {String}
*/
getPath(name : string, options : any) : string {
return this.getRoot(options) + '/' + name + this.options.suffix + '.js';
};
}<|fim▁end|> | *
* Dependency loaders don't remember whether a resource has been loaded or |
<|file_name|>Client.py<|end_file_name|><|fim▁begin|>from socket import *
import sys
clientSocket = socket(AF_INET, SOCK_STREAM) #creates socket
server_address = ('127.0.0.1', 80) # address and port of the server
<|fim▁hole|>filename=raw_input("ENTER THE FILENAME: ")
f = open(filename)
outputdata = f.read()#read the input file into variable
print "HTML CODE OF THE GIVEN FILE:", outputdata #display the html code of the file
clientSocket.close() #close the connection<|fim▁end|> | print >>sys.stderr, 'CONNECTING TO %s AT PORT %s' % server_address
clientSocket.connect(server_address)#connect to server at given address |
<|file_name|>tika.js<|end_file_name|><|fim▁begin|>/*jshint node:true*/
/*global test, suite, setup, teardown*/
'use strict';
var assert = require('assert');
var tika = require('../');
suite('document tests', function() {
test('detect txt content-type', function(done) {
tika.type('test/data/file.txt', function(err, contentType) {
assert.ifError(err);
assert.equal(typeof contentType, 'string');
assert.equal(contentType, 'text/plain');
done();
});
});
test('detect txt content-type and charset', function(done) {
tika.typeAndCharset('test/data/file.txt', function(err, contentType) {
assert.ifError(err);
assert.equal(typeof contentType, 'string');
assert.equal(contentType, 'text/plain; charset=ISO-8859-1');
done();
});
});
test('extract from txt', function(done) {
tika.text('test/data/file.txt', function(err, text) {
assert.ifError(err);
assert.equal(typeof text, 'string');
assert.equal(text, 'Just some text.\n\n');
done();
});
});
test('extract meta from txt', function(done) {
tika.meta('test/data/file.txt', function(err, meta) {
assert.ifError(err);
assert.ok(meta);
assert.equal(typeof meta.resourceName[0], 'string');
assert.deepEqual(meta.resourceName, ['file.txt']);
assert.deepEqual(meta['Content-Type'], ['text/plain; charset=ISO-8859-1']);
assert.deepEqual(meta['Content-Encoding'], ['ISO-8859-1']);
done();
});
});
test('extract meta and text from txt', function(done) {
tika.extract('test/data/file.txt', function(err, text, meta) {
assert.ifError(err);
assert.equal(typeof text, 'string');
assert.equal(text, 'Just some text.\n\n');
assert.ok(meta);
assert.equal(typeof meta.resourceName[0], 'string');
assert.deepEqual(meta.resourceName, ['file.txt']);
assert.deepEqual(meta['Content-Type'], ['text/plain; charset=ISO-8859-1']);
assert.deepEqual(meta['Content-Encoding'], ['ISO-8859-1']);
done();<|fim▁hole|> test('extract from extensionless txt', function(done) {
tika.text('test/data/extensionless/txt', function(err, text) {
assert.ifError(err);
assert.equal(text, 'Just some text.\n\n');
done();
});
});
test('extract from doc', function(done) {
tika.text('test/data/file.doc', function(err, text) {
assert.ifError(err);
assert.equal(text, 'Just some text.\n');
done();
});
});
test('extract meta from doc', function(done) {
tika.meta('test/data/file.doc', function(err, meta) {
assert.ifError(err);
assert.ok(meta);
assert.deepEqual(meta.resourceName, ['file.doc']);
assert.deepEqual(meta['Content-Type'], ['application/msword']);
assert.deepEqual(meta['dcterms:created'], ['2013-12-06T21:15:26Z']);
done();
});
});
test('extract from extensionless doc', function(done) {
tika.text('test/data/extensionless/doc', function(err, text) {
assert.ifError(err);
assert.equal(text, 'Just some text.\n');
done();
});
});
test('extract from docx', function(done) {
tika.text('test/data/file.docx', function(err, text) {
assert.ifError(err);
assert.equal(text, 'Just some text.\n');
done();
});
});
test('extract meta from docx', function(done) {
tika.meta('test/data/file.docx', function(err, meta) {
assert.ifError(err);
assert.ok(meta);
assert.deepEqual(meta.resourceName, ['file.docx']);
assert.deepEqual(meta['Content-Type'], ['application/vnd.openxmlformats-officedocument.wordprocessingml.document']);
assert.deepEqual(meta['Application-Name'], ['LibreOffice/4.1.3.2$MacOSX_x86 LibreOffice_project/70feb7d99726f064edab4605a8ab840c50ec57a']);
done();
});
});
test('extract from extensionless docx', function(done) {
tika.text('test/data/extensionless/docx', function(err, text) {
assert.ifError(err);
assert.equal(text, 'Just some text.\n');
done();
});
});
test('extract meta from extensionless docx', function(done) {
tika.meta('test/data/extensionless/docx', function(err, meta) {
assert.ifError(err);
assert.ok(meta);
assert.deepEqual(meta.resourceName, ['docx']);
assert.deepEqual(meta['Content-Type'], ['application/vnd.openxmlformats-officedocument.wordprocessingml.document']);
assert.deepEqual(meta['Application-Name'], ['LibreOffice/4.1.3.2$MacOSX_x86 LibreOffice_project/70feb7d99726f064edab4605a8ab840c50ec57a']);
done();
});
});
test('extract from pdf', function(done) {
tika.text('test/data/file.pdf', function(err, text) {
assert.ifError(err);
assert.equal(text.trim(), 'Just some text.');
done();
});
});
test('detect content-type of pdf', function(done) {
tika.type('test/data/file.pdf', function(err, contentType) {
assert.ifError(err);
assert.equal(contentType, 'application/pdf');
done();
});
});
test('extract meta from pdf', function(done) {
tika.meta('test/data/file.pdf', function(err, meta) {
assert.ifError(err);
assert.ok(meta);
assert.deepEqual(meta.resourceName, ['file.pdf']);
assert.deepEqual(meta['Content-Type'], ['application/pdf']);
assert.deepEqual(meta.producer, ['LibreOffice 4.1']);
done();
});
});
test('extract from extensionless pdf', function(done) {
tika.text('test/data/extensionless/pdf', function(err, text) {
assert.ifError(err);
assert.equal(text.trim(), 'Just some text.');
done();
});
});
test('extract meta from extensionless pdf', function(done) {
tika.meta('test/data/extensionless/pdf', function(err, meta) {
assert.ifError(err);
assert.ok(meta);
assert.deepEqual(meta.resourceName, ['pdf']);
assert.deepEqual(meta['Content-Type'], ['application/pdf']);
assert.deepEqual(meta.producer, ['LibreOffice 4.1']);
done();
});
});
test('extract from protected pdf', function(done) {
tika.text('test/data/protected/file.pdf', function(err, text) {
assert.ifError(err);
assert.equal(text.trim(), 'Just some text.');
done();
});
});
test('extract meta from protected pdf', function(done) {
tika.meta('test/data/protected/file.pdf', function(err, meta) {
assert.ifError(err);
assert.ok(meta);
assert.deepEqual(meta.resourceName, ['file.pdf']);
assert.deepEqual(meta['Content-Type'], ['application/pdf']);
assert.deepEqual(meta.producer, ['LibreOffice 4.1']);
done();
});
});
});
suite('partial document extraction tests', function() {
test('extract from long txt', function(done) {
tika.text('test/data/big/file.txt', { maxLength: 10 }, function(err, text) {
assert.ifError(err);
assert.equal(text.length, 10);
assert.equal(text, 'Lorem ipsu');
done();
});
});
test('extract from pdf', function(done) {
tika.text('test/data/file.pdf', { maxLength: 10 }, function(err, text) {
assert.ifError(err);
assert.equal(text.length, 10);
assert.equal(text.trim(), 'Just some');
done();
});
});
});
suite('obscure document tests', function() {
test('extract from Word 2003 XML', function(done) {
tika.text('test/data/obscure/word2003.xml', function(err, text) {
assert.ifError(err);
assert.ok(-1 !== text.indexOf('Just some text.'));
assert.ok(-1 === text.indexOf('<?xml'));
done();
});
});
});
suite('structured data tests', function() {
test('extract from plain XML', function(done) {
tika.text('test/data/structured/plain.xml', function(err, text) {
assert.ifError(err);
assert.ok(-1 !== text.indexOf('Just some text.'));
assert.ok(-1 === text.indexOf('<?xml'));
done();
});
});
});
suite('image tests', function() {
test('extract from png', function(done) {
tika.text('test/data/file.png', function(err, text) {
assert.ifError(err);
assert.equal(text, '');
done();
});
});
test('extract from extensionless png', function(done) {
tika.text('test/data/extensionless/png', function(err, text) {
assert.ifError(err);
assert.equal(text, '');
done();
});
});
test('extract from gif', function(done) {
tika.text('test/data/file.gif', function(err, text) {
assert.ifError(err);
assert.equal(text, '');
done();
});
});
test('extract meta from gif', function(done) {
tika.meta('test/data/file.gif', function(err, meta) {
assert.ifError(err);
assert.ok(meta);
assert.deepEqual(meta.resourceName, ['file.gif']);
assert.deepEqual(meta['Content-Type'], ['image/gif']);
assert.deepEqual(meta['Dimension ImageOrientation'], ['Normal']);
done();
});
});
test('extract from extensionless gif', function(done) {
tika.text('test/data/extensionless/gif', function(err, text) {
assert.ifError(err);
assert.equal(text, '');
done();
});
});
test('extract meta from extensionless gif', function(done) {
tika.meta('test/data/extensionless/gif', function(err, meta) {
assert.ifError(err);
assert.ok(meta);
assert.deepEqual(meta.resourceName, ['gif']);
assert.deepEqual(meta['Content-Type'], ['image/gif']);
assert.deepEqual(meta['Dimension ImageOrientation'], ['Normal']);
done();
});
});
});
suite('non-utf8 encoded document tests', function() {
test('extract Windows Latin 1 text', function(done) {
tika.text('test/data/nonutf8/windows-latin1.txt', function(err, text) {
assert.ifError(err);
assert.equal(text, 'Algún pequeño trozo de texto.\n\n');
done();
});
});
test('detect Windows Latin 1 text charset', function(done) {
tika.charset('test/data/nonutf8/windows-latin1.txt', function(err, charset) {
assert.ifError(err);
assert.equal(typeof charset, 'string');
assert.equal(charset, 'ISO-8859-1');
done();
});
});
test('detect Windows Latin 1 text content-type and charset', function(done) {
tika.typeAndCharset('test/data/nonutf8/windows-latin1.txt', function(err, contentType) {
assert.ifError(err);
assert.equal(contentType, 'text/plain; charset=ISO-8859-1');
done();
});
});
test('extract UTF-16 English-language text', function(done) {
tika.text('test/data/nonutf8/utf16-english.txt', function(err, text) {
assert.ifError(err);
assert.equal(text, 'Just some text.\n\n');
done();
});
});
test('detect UTF-16 English-language text charset', function(done) {
tika.charset('test/data/nonutf8/utf16-english.txt', function(err, charset) {
assert.ifError(err);
assert.equal(charset, 'UTF-16LE');
done();
});
});
test('detect UTF-16 English-language text content-type and charset', function(done) {
tika.typeAndCharset('test/data/nonutf8/utf16-english.txt', function(err, contentType) {
assert.ifError(err);
assert.equal(contentType, 'text/plain; charset=UTF-16LE');
done();
});
});
test('extract UTF-16 Chinese (Simplified) text', function(done) {
tika.text('test/data/nonutf8/utf16-chinese.txt', function(err, text) {
assert.ifError(err);
assert.equal(text, '\u53ea\u662f\u4e00\u4e9b\u6587\u5b57\u3002\n\n');
done();
});
});
test('detect UTF-16 Chinese (Simplified) text charset', function(done) {
tika.charset('test/data/nonutf8/utf16-chinese.txt', function(err, charset) {
assert.ifError(err);
assert.equal(charset, 'UTF-16LE');
done();
});
});
test('detect UTF-16 Chinese (Simplified) text content-type and charset', function(done) {
tika.typeAndCharset('test/data/nonutf8/utf16-chinese.txt', function(err, contentType) {
assert.ifError(err);
assert.equal(contentType, 'text/plain; charset=UTF-16LE');
done();
});
});
});
suite('archive tests', function() {
test('extract from compressed archive', function(done) {
tika.text('test/data/archive/files.zip', function(err, text) {
assert.ifError(err);
assert.equal(text.trim(), 'file1.txt\nSome text 1.\n\n\n\n\nfile2.txt\nSome text 2.\n\n\n\n\nfile3.txt\nSome text 3.');
done();
});
});
test('extract from compressed zlib archive', function(done) {
tika.text('test/data/archive/files.zlib', function(err, text) {
assert.ifError(err);
assert.equal(text.trim(), 'files\nSome text 1.\nSome text 2.\nSome text 3.');
done();
});
});
test('detect compressed archive content-type', function(done) {
tika.type('test/data/archive/files.zip', function(err, contentType) {
assert.ifError(err);
assert.equal(contentType, 'application/zip');
done();
});
});
test('extract from twice compressed archive', function(done) {
tika.text('test/data/archive/files-files.zip', function(err, text) {
assert.ifError(err);
assert.equal(text.trim(), 'file4.txt\nSome text 4.\n\n\n\n\nfile5.txt\nSome text 5.\n\n\n\n\nfile6.txt\nSome text 6.\n\n\n\n\nfiles.zip\n\n\nfile1.txt\n\nSome text 1.\n\n\n\n\n\n\n\nfile2.txt\n\nSome text 2.\n\n\n\n\n\n\n\nfile3.txt\n\nSome text 3.');
done();
});
});
});
suite('encrypted doc tests', function() {
test('detect encrypted pdf content-type', function(done) {
tika.type('test/data/encrypted/file.pdf', function(err, contentType) {
assert.ifError(err);
assert.equal(contentType, 'application/pdf');
done();
});
});
test('detect encrypted doc content-type', function(done) {
tika.type('test/data/encrypted/file.doc', function(err, contentType) {
assert.ifError(err);
assert.equal(contentType, 'application/msword');
done();
});
});
test('specify password to decrypt document', function(done) {
tika.text('test/data/encrypted/file.pdf', {
password: 'password'
}, function(err, text) {
assert.ifError(err);
assert.equal(text.trim(), 'Just some text.');
done();
});
});
});
suite('error handling tests', function() {
test('extract from encrypted doc', function(done) {
tika.text('test/data/encrypted/file.doc', function(err, text) {
assert.ok(err);
assert.ok(-1 !== err.toString().indexOf('EncryptedDocumentException: Cannot process encrypted word file'));
done();
});
});
test('extract from encrypted pdf', function(done) {
tika.text('test/data/encrypted/file.pdf', function(err, text) {
assert.ok(err);
assert.ok(-1 !== err.toString().indexOf('Unable to process: document is encrypted'));
done();
});
});
});
suite('http extraction tests', function() {
test('extract from pdf over http', function(done) {
tika.text('http://www.ohchr.org/EN/UDHR/Documents/UDHR_Translations/eng.pdf', function(err, text) {
assert.ifError(err);
assert.ok(-1 !== text.indexOf('Universal Declaration of Human Rights'));
done();
});
});
});
suite('ftp extraction tests', function() {
test('extract from text file over ftp', function(done) {
tika.text('ftp://ftp.ed.ac.uk/INSTRUCTIONS-FOR-USING-THIS-SERVICE', function(err, text) {
assert.ifError(err);
assert.ok(-1 !== text.indexOf('This service is managed by Information Services'));
done();
});
});
});
suite('language detection tests', function() {
test('detect English text', function(done) {
tika.language('This just some text in English.', function(err, language, reasonablyCertain) {
assert.ifError(err);
assert.equal(typeof language, 'string');
assert.equal(typeof reasonablyCertain, 'boolean');
assert.equal(language, 'en');
done();
});
});
});<|fim▁end|> | });
});
|
<|file_name|>proc-macro.rs<|end_file_name|><|fim▁begin|>extern crate cargotest;
extern crate hamcrest;
use cargotest::is_nightly;
use cargotest::support::{project, execs};
use hamcrest::assert_that;
#[test]
fn probe_cfg_before_crate_type_discovery() {
if !is_nightly() {
return;
}
let client = project("client")
.file("Cargo.toml", r#"
[package]
name = "client"
version = "0.0.1"
authors = []
[target.'cfg(not(stage300))'.dependencies.noop]
path = "../noop"
"#)
.file("src/main.rs", r#"
#![feature(proc_macro)]
#[macro_use]
extern crate noop;
#[derive(Noop)]
struct X;
fn main() {}
"#)
.build();
let _noop = project("noop")
.file("Cargo.toml", r#"
[package]
name = "noop"
version = "0.0.1"
authors = []
[lib]
proc-macro = true
"#)
.file("src/lib.rs", r#"
#![feature(proc_macro, proc_macro_lib)]
extern crate proc_macro;
use proc_macro::TokenStream;
#[proc_macro_derive(Noop)]
pub fn noop(_input: TokenStream) -> TokenStream {
"".parse().unwrap()
}
"#)
.build();
assert_that(client.cargo("build"),
execs().with_status(0));
}
#[test]
fn noop() {
if !is_nightly() {
return;
}
let client = project("client")
.file("Cargo.toml", r#"
[package]
name = "client"
version = "0.0.1"
authors = []
[dependencies.noop]
path = "../noop"
"#)
.file("src/main.rs", r#"
#![feature(proc_macro)]
#[macro_use]
extern crate noop;
#[derive(Noop)]
struct X;
fn main() {}
"#)
.build();
let _noop = project("noop")
.file("Cargo.toml", r#"
[package]
name = "noop"
version = "0.0.1"
authors = []
[lib]
proc-macro = true
"#)
.file("src/lib.rs", r#"
#![feature(proc_macro, proc_macro_lib)]
extern crate proc_macro;
use proc_macro::TokenStream;
#[proc_macro_derive(Noop)]
pub fn noop(_input: TokenStream) -> TokenStream {
"".parse().unwrap()
}
"#)
.build();
assert_that(client.cargo("build"),
execs().with_status(0));
assert_that(client.cargo("build"),
execs().with_status(0));
}
#[test]
fn impl_and_derive() {
if !is_nightly() {
return;
}
let client = project("client")
.file("Cargo.toml", r#"
[package]
name = "client"
version = "0.0.1"
authors = []
[dependencies.transmogrify]
path = "../transmogrify"
"#)
.file("src/main.rs", r#"
#![feature(proc_macro)]
#[macro_use]
extern crate transmogrify;
trait ImplByTransmogrify {
fn impl_by_transmogrify(&self) -> bool;
}
#[derive(Transmogrify, Debug)]
struct X { success: bool }
fn main() {
let x = X::new();
assert!(x.impl_by_transmogrify());
println!("{:?}", x);
}
"#)
.build();
let _transmogrify = project("transmogrify")
.file("Cargo.toml", r#"
[package]
name = "transmogrify"
version = "0.0.1"
authors = []
[lib]
proc-macro = true
"#)
.file("src/lib.rs", r#"
#![feature(proc_macro, proc_macro_lib)]
extern crate proc_macro;
use proc_macro::TokenStream;
#[proc_macro_derive(Transmogrify)]
#[doc(hidden)]
pub fn transmogrify(input: TokenStream) -> TokenStream {
"
impl X {
fn new() -> Self {
X { success: true }
}
}
impl ImplByTransmogrify for X {
fn impl_by_transmogrify(&self) -> bool {
true
}
}
".parse().unwrap()
}
"#)
.build();
assert_that(client.cargo("build"),
execs().with_status(0));
assert_that(client.cargo("run"),
execs().with_status(0).with_stdout("X { success: true }"));
}
#[test]
fn plugin_and_proc_macro() {
if !is_nightly() {
return;
}
let questionable = project("questionable")
.file("Cargo.toml", r#"
[package]
name = "questionable"
version = "0.0.1"
authors = []
[lib]
plugin = true
proc-macro = true
"#)
.file("src/lib.rs", r#"
#![feature(plugin_registrar, rustc_private)]
#![feature(proc_macro, proc_macro_lib)]
extern crate rustc_plugin;
use rustc_plugin::Registry;
extern crate proc_macro;
use proc_macro::TokenStream;
#[plugin_registrar]
pub fn plugin_registrar(reg: &mut Registry) {}
#[proc_macro_derive(Questionable)]
pub fn questionable(input: TokenStream) -> TokenStream {
input
}
"#)
.build();
let msg = " lib.plugin and lib.proc-macro cannot both be true";
assert_that(questionable.cargo("build"),
execs().with_status(101).with_stderr_contains(msg));
}
#[test]
fn proc_macro_doctest() {
if !is_nightly() {
return
}
let foo = project("foo")<|fim▁hole|> [package]
name = "foo"
version = "0.1.0"
authors = []
[lib]
proc-macro = true
"#)
.file("src/lib.rs", r#"
#![feature(proc_macro, proc_macro_lib)]
#![crate_type = "proc-macro"]
extern crate proc_macro;
use proc_macro::TokenStream;
/// ```
/// assert!(true);
/// ```
#[proc_macro_derive(Bar)]
pub fn derive(_input: TokenStream) -> TokenStream {
"".parse().unwrap()
}
#[test]
fn a() {
assert!(true);
}
"#)
.build();
assert_that(foo.cargo("test"),
execs().with_status(0)
.with_stdout_contains("test a ... ok")
.with_stdout_contains_n("test [..] ... ok", 2));
}<|fim▁end|> | .file("Cargo.toml", r#" |
<|file_name|>Action.java<|end_file_name|><|fim▁begin|>package com.smartgwt.mobile.client.widgets;
import com.google.gwt.resources.client.ImageResource;
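// Illustrative sketch only (the subclass and the `icons.save()` resource below
// are hypothetical, not part of this API): callers supply the presentation up
// front and put the behavior in execute(), e.g.
//
//     Action save = new Action("Save", icons.save(), 16, "Save the record") {
//         @Override
//         public void execute(ActionContext context) {
//             // persist the record here
//         }
//     };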
public abstract class Action {
private ImageResource icon;
private int iconSize;
private String title;
private String tooltip;
public Action(String title) {<|fim▁hole|> }
public Action(ImageResource icon) {
this.icon = icon;
}
public Action(String title, ImageResource icon) {
this(title);
this.icon = icon;
}
public Action(String title, ImageResource icon, int iconSize) {
this(title, icon);
this.iconSize = iconSize;
}
public Action(String title, ImageResource icon, int iconSize, String tooltip) {
this(title, icon, iconSize);
this.tooltip = tooltip;
}
public final ImageResource getIcon() {
return icon;
}
public final int getIconSize() {
return iconSize;
}
public final String getTitle() {
return title;
}
public final String getTooltip() {
return tooltip;
}
public abstract void execute(ActionContext context);
}<|fim▁end|> | this.title = title; |
<|file_name|>admin.py<|end_file_name|><|fim▁begin|>from django.contrib import admin
from modeltranslation.admin import TranslationAdmin
from .models import Campaign<|fim▁hole|>class CampaignAdmin(TranslationAdmin):
list_display = ("__str__", "url", "image", "active")
admin.site.register(Campaign, CampaignAdmin)<|fim▁end|> | |
<|file_name|>host_file_system_iterator_test.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python
# Copyright 2013 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
from copy import deepcopy
import unittest
from host_file_system_provider import HostFileSystemProvider
from host_file_system_iterator import HostFileSystemIterator
from object_store_creator import ObjectStoreCreator
from test_branch_utility import TestBranchUtility
from test_data.canned_data import CANNED_API_FILE_SYSTEM_DATA
from test_file_system import TestFileSystem
def _GetIterationTracker(version):
'''Adds the ChannelInfo object from each iteration to a list, and signals the
loop to stop when |version| is reached.
'''
iterations = []
def callback(file_system, channel_info):
if channel_info.version == version:
return False
iterations.append(channel_info)
return True
return (iterations, callback)
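# Sketch of the tracker contract: iterations, callback = _GetIterationTracker(28)
# yields a callback that returns False (stopping the walk) at version 28, so
# |iterations| ends up holding every ChannelInfo visited before that point.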
class HostFileSystemIteratorTest(unittest.TestCase):
def setUp(self):
def host_file_system_constructor(branch, **optargs):
return TestFileSystem(deepcopy(CANNED_API_FILE_SYSTEM_DATA[branch]))
host_file_system_provider = HostFileSystemProvider(
ObjectStoreCreator.ForTest(),
constructor_for_test=host_file_system_constructor)
self._branch_utility = TestBranchUtility.CreateWithCannedData()
self._iterator = HostFileSystemIterator(
host_file_system_provider,
self._branch_utility)
  def _GetStableChannelInfo(self, version):
return self._branch_utility.GetStableChannelInfo(version)
def _GetChannelInfo(self, channel_name):
return self._branch_utility.GetChannelInfo(channel_name)
def testAscending(self):
# Start at |stable| version 5, and move up towards |master|.
# Total: 28 file systems.
iterations, callback = _GetIterationTracker(0)
self.assertEqual(
self._iterator.Ascending(self._GetStableChannelInfo(5), callback),
self._GetChannelInfo('master'))
self.assertEqual(len(iterations), 28)
# Start at |stable| version 5, and move up towards |master|. The callback
# fails at |beta|, so the last successful callback was the latest version
# of |stable|. Total: 25 file systems.
iterations, callback = _GetIterationTracker(
self._GetChannelInfo('beta').version)
self.assertEqual(
self._iterator.Ascending(self._GetStableChannelInfo(5), callback),
self._GetChannelInfo('stable'))
self.assertEqual(len(iterations), 25)
# Start at |stable| version 5, and the callback fails immediately. Since
# no file systems are successfully processed, expect a return of None.
iterations, callback = _GetIterationTracker(5)
self.assertEqual(
self._iterator.Ascending(self._GetStableChannelInfo(5), callback),
None)
self.assertEqual([], iterations)
# Start at |stable| version 5, and the callback fails at version 6.
# The return should represent |stable| version 5.
iterations, callback = _GetIterationTracker(6)
self.assertEqual(
self._iterator.Ascending(self._GetStableChannelInfo(5), callback),
self._GetStableChannelInfo(5))
self.assertEqual([self._GetStableChannelInfo(5)], iterations)
# Start at the latest version of |stable|, and the callback fails at
# |master|. Total: 3 file systems.
iterations, callback = _GetIterationTracker('master')
self.assertEqual(
self._iterator.Ascending(self._GetChannelInfo('stable'), callback),
self._GetChannelInfo('dev'))
self.assertEqual([self._GetChannelInfo('stable'),
self._GetChannelInfo('beta'),
self._GetChannelInfo('dev')], iterations)
# Start at |stable| version 10, and the callback fails at |master|.
iterations, callback = _GetIterationTracker('master')
self.assertEqual(
self._iterator.Ascending(self._GetStableChannelInfo(10), callback),
self._GetChannelInfo('dev'))
self.assertEqual([self._GetStableChannelInfo(10),
self._GetStableChannelInfo(11),
self._GetStableChannelInfo(12),<|fim▁hole|> self._GetStableChannelInfo(17),
self._GetStableChannelInfo(18),
self._GetStableChannelInfo(19),
self._GetStableChannelInfo(20),
self._GetStableChannelInfo(21),
self._GetStableChannelInfo(22),
self._GetStableChannelInfo(23),
self._GetStableChannelInfo(24),
self._GetStableChannelInfo(25),
self._GetStableChannelInfo(26),
self._GetStableChannelInfo(27),
self._GetStableChannelInfo(28),
self._GetChannelInfo('stable'),
self._GetChannelInfo('beta'),
self._GetChannelInfo('dev')], iterations)
def testDescending(self):
# Start at |master|, and the callback fails immediately. No file systems
# are successfully processed, so Descending() will return None.
iterations, callback = _GetIterationTracker('master')
self.assertEqual(
self._iterator.Descending(self._GetChannelInfo('master'), callback),
None)
self.assertEqual([], iterations)
# Start at |master|, and the callback fails at |dev|. Last good iteration
# should be |master|.
iterations, callback = _GetIterationTracker(
self._GetChannelInfo('dev').version)
self.assertEqual(
self._iterator.Descending(self._GetChannelInfo('master'), callback),
self._GetChannelInfo('master'))
self.assertEqual([self._GetChannelInfo('master')], iterations)
# Start at |master|, and then move from |dev| down to |stable| at version 5.
# Total: 28 file systems.
iterations, callback = _GetIterationTracker(0)
self.assertEqual(
self._iterator.Descending(self._GetChannelInfo('master'), callback),
self._GetStableChannelInfo(5))
self.assertEqual(len(iterations), 28)
# Start at the latest version of |stable|, and move down to |stable| at
# version 5. Total: 25 file systems.
iterations, callback = _GetIterationTracker(0)
self.assertEqual(
self._iterator.Descending(self._GetChannelInfo('stable'), callback),
self._GetStableChannelInfo(5))
self.assertEqual(len(iterations), 25)
# Start at |dev| and iterate down through |stable| versions. The callback
# fails at version 10. Total: 18 file systems.
iterations, callback = _GetIterationTracker(10)
self.assertEqual(
self._iterator.Descending(self._GetChannelInfo('dev'), callback),
self._GetStableChannelInfo(11))
self.assertEqual([self._GetChannelInfo('dev'),
self._GetChannelInfo('beta'),
self._GetChannelInfo('stable'),
self._GetStableChannelInfo(28),
self._GetStableChannelInfo(27),
self._GetStableChannelInfo(26),
self._GetStableChannelInfo(25),
self._GetStableChannelInfo(24),
self._GetStableChannelInfo(23),
self._GetStableChannelInfo(22),
self._GetStableChannelInfo(21),
self._GetStableChannelInfo(20),
self._GetStableChannelInfo(19),
self._GetStableChannelInfo(18),
self._GetStableChannelInfo(17),
self._GetStableChannelInfo(16),
self._GetStableChannelInfo(15),
self._GetStableChannelInfo(14),
self._GetStableChannelInfo(13),
self._GetStableChannelInfo(12),
self._GetStableChannelInfo(11)], iterations)
if __name__ == '__main__':
unittest.main()<|fim▁end|> | self._GetStableChannelInfo(13),
self._GetStableChannelInfo(14),
self._GetStableChannelInfo(15),
self._GetStableChannelInfo(16), |
<|file_name|>lyg0vtw_client.py<|end_file_name|><|fim▁begin|><|fim▁hole|>'A simple client for accessing api.ly.g0v.tw.'
import json
import unittest
try:
import urllib.request as request
import urllib.parse as urlparse
except:
import urllib2 as request
import urllib as urlparse
def assert_args(func, *args):
def inner(*args):
required_arg = args[1]
assert(len(required_arg) > 0)
return func(*args)
return inner
class LY_G0V_Client:
BASE_URL = 'http://api-beta.ly.g0v.tw/v0/'
# BASE_URL = 'http://api.ly.g0v.tw/v0/'
def _fetch_data(self, url_path):
URL = LY_G0V_Client.BASE_URL + url_path
try:
f = request.urlopen(URL)
r = f.read()
r = r.decode('utf-8')
return json.loads(r)
except Exception as e:
print("Failed to call " + URL)
raise e
def fetch_all_bills(self):
'Fetch all bills.'
return self._fetch_data('collections/bills')
def fetch_all_motions(self):
'Fetch all motions.'
return self._fetch_data('collections/motions')
def fetch_all_sittings(self):
'Fetch all sittings.'
return self._fetch_data('collections/sittings')
@assert_args
def fetch_bill(self, bill_id):
'Fetch metadata of a specific bill.'
return self._fetch_data('collections/bills/' + str(bill_id))
@assert_args
def fetch_bill_data(self, bill_id):
'Fetch data of a specific bill.'
return self._fetch_data('collections/bills/' + str(bill_id) + '/data')
@assert_args
def fetch_motions_related_with_bill(self, bill_id):
'Fetch motions related with a specific bill.'
query = json.dumps({'bill_ref': bill_id})
query = urlparse.quote(query)
return self._fetch_data('collections/motions/?q='+query)
@assert_args
def fetch_sitting(self, sitting_id):
        'Fetch metadata of a specific sitting.'
        return self._fetch_data('collections/sittings/' + str(sitting_id))
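# Illustrative usage sketch (network access assumed; the bill id is the one
# exercised by the tests below):
#
#   client = LY_G0V_Client()
#   bill = client.fetch_bill('1021021071000400')
#   motions = client.fetch_motions_related_with_bill(bill['bill_id'])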
class TestClient(unittest.TestCase):
def setUp(self):
import time
time.sleep(1)
self.client = LY_G0V_Client()
def _test_bill(self, bill):
self.assertTrue(isinstance(bill, dict), str(type(bill)))
keys = ('proposed_by', 'doc', 'abstract', 'sponsors',
'summary', 'bill_ref', 'motions', 'cosponsors',
'bill_id');
for key in keys:
self.assertTrue(key in bill)
if isinstance(bill['doc'], dict):
self.assertTrue('pdf' in bill['doc'])
self.assertTrue('doc' in bill['doc'])
def _test_bills(self, bills):
for key in ('entries', 'paging'):
self.assertTrue(key in bills)
for key in ('l', 'sk', 'count'):
self.assertTrue(key in bills['paging'])
for bill in bills['entries']:
self._test_bill(bill)
def _test_motion(self, motion):
self.assertTrue(isinstance(motion, dict), str(type(motion)))
keys = ('result', 'resolution', 'motion_class', 'bill_id',
'agenda_item', 'bill_ref', 'tts_id',
'subitem', 'status', 'sitting_id', 'item',
'summary', 'tts_seq', 'proposed_by', 'doc')
for key in keys:
self.assertTrue(key in motion, key)
if isinstance(motion['doc'], dict):
self.assertTrue('pdf' in motion['doc'])
self.assertTrue('doc' in motion['doc'])
def _test_motions(self, motions):
self.assertTrue(isinstance(motions, dict), str(type(motions)))
for key in ('entries', 'paging'):
self.assertTrue(key in motions)
for key in ('l', 'sk', 'count'):
self.assertTrue(key in motions['paging'])
for motion in motions['entries']:
self._test_motion(motion)
def _test_data(self, data):
for key in ('related', 'content'):
self.assertTrue(key in data)
self.assertTrue(isinstance(data['related'], list))
self.assertTrue(isinstance(data['content'], list))
for item in data['content']:
content_keys = ('name', 'type', 'content', 'header')
for content_key in content_keys:
self.assertTrue(content_key in item)
self.assertTrue(len(item['name']) > 0)
self.assertTrue(isinstance(item['name'], str) or \
isinstance(item['name'], unicode))
self.assertTrue(len(item['type']) > 0)
self.assertTrue(isinstance(item['type'], str) or \
isinstance(item['type'], unicode))
self.assertTrue(len(item['content']) > 0)
self.assertTrue(isinstance(item['content'], list))
for content in item['content']:
self.assertTrue(isinstance(content, list))
for line in content:
self.assertTrue(isinstance(line, str))
self.assertTrue(len(item['header']) > 0)
self.assertTrue(isinstance(item['header'], list))
for header in item['header']:
self.assertTrue(isinstance(header, str) or \
isinstance(header, unicode))
def _test_sitting(self, sitting):
self.assertTrue(isinstance(sitting, dict), str(type(sitting)))
keys = ('dates', 'ad', 'videos', 'extra', 'motions',
'sitting', 'summary', 'session', 'committee', 'id',
'name')
for key in keys:
self.assertTrue(key in sitting, key)
def _test_sittings(self, sittings):
self.assertTrue(isinstance(sittings, dict), str(type(sittings)))
for key in ('entries', 'paging'):
self.assertTrue(key in sittings)
for key in ('l', 'sk', 'count'):
self.assertTrue(key in sittings['paging'])
for sitting in sittings['entries']:
self._test_sitting(sitting)
def test_all_bills(self):
bills = self.client.fetch_all_bills()
self._test_bills(bills)
def test_all_motions(self):
motions = self.client.fetch_all_motions()
self._test_motions(motions)
def test_all_sittings(self):
sittings = self.client.fetch_all_sittings()
self._test_sittings(sittings)
def test_fetch_bill(self):
bill = self.client.fetch_bill('1021021071000400')
self._test_bill(bill)
def test_fetch_bill_data(self):
data = self.client.fetch_bill_data('1021021071000400')
self._test_data(data)
def test_fetch_motions_related_with_bill(self):
motions = self.client.fetch_motions_related_with_bill('1021021071000400')
self._test_motions(motions)
if __name__ == '__main__':
unittest.main()<|fim▁end|> | #!/usr/bin/env python
# encoding: utf-8
|
<|file_name|>EntityWhereChange.d.ts<|end_file_name|><|fim▁begin|>import { AbstractEntityChangeApi, AbstractEntityChange, StubAbstractEntityChange } from "./AbstractEntityChange";
/**
* Created by Papa on 9/15/2016.
*/
export interface EntityWhereChangeApi extends AbstractEntityChangeApi {
numberOfAffectedRecords: number;
queryJson: string;
}
export declare class EntityWhereChange extends AbstractEntityChange implements EntityWhereChangeApi {
numberOfAffectedRecords: number;
queryJson: string;<|fim▁hole|> numberOfAffectedRecords: number;
queryJson: string;
}<|fim▁end|> | }
export declare class StubWhereEntityChange extends StubAbstractEntityChange implements EntityWhereChangeApi { |
<|file_name|>callback.rs<|end_file_name|><|fim▁begin|>// Copyright (c) 2016 Yusuke Sasaki
//
// This software is released under the MIT License.
// See http://opensource.org/licenses/mit-license.php or <LICENSE>.
use ffi;
use itertools::{Itertools, Zip};
use std::mem::transmute;
use std::ops::Deref;
use std::ptr::null;
use std::os::raw;
use error::{Error, Result};
use model::{Model, Var, ConstrSense};
use model::expr::LinExpr;
use util;
// Location where the callback called.
const POLLING: i32 = 0;
const PRESOLVE: i32 = 1;
const SIMPLEX: i32 = 2;
const MIP: i32 = 3;
const MIPSOL: i32 = 4;<|fim▁hole|>
const PRE_COLDEL: i32 = 1000;
const PRE_ROWDEL: i32 = 1001;
const PRE_SENCHG: i32 = 1002;
const PRE_BNDCHG: i32 = 1003;
const PRE_COECHG: i32 = 1004;
const SPX_ITRCNT: i32 = 2000;
const SPX_OBJVAL: i32 = 2001;
const SPX_PRIMINF: i32 = 2002;
const SPX_DUALINF: i32 = 2003;
const SPX_ISPERT: i32 = 2004;
const MIP_OBJBST: i32 = 3000;
const MIP_OBJBND: i32 = 3001;
const MIP_NODCNT: i32 = 3002;
const MIP_SOLCNT: i32 = 3003;
const MIP_CUTCNT: i32 = 3004;
const MIP_NODLFT: i32 = 3005;
const MIP_ITRCNT: i32 = 3006;
#[allow(dead_code)]
const MIP_OBJBNDC: i32 = 3007;
const MIPSOL_SOL: i32 = 4001;
const MIPSOL_OBJ: i32 = 4002;
const MIPSOL_OBJBST: i32 = 4003;
const MIPSOL_OBJBND: i32 = 4004;
const MIPSOL_NODCNT: i32 = 4005;
const MIPSOL_SOLCNT: i32 = 4006;
#[allow(dead_code)]
const MIPSOL_OBJBNDC: i32 = 4007;
const MIPNODE_STATUS: i32 = 5001;
const MIPNODE_REL: i32 = 5002;
const MIPNODE_OBJBST: i32 = 5003;
const MIPNODE_OBJBND: i32 = 5004;
const MIPNODE_NODCNT: i32 = 5005;
const MIPNODE_SOLCNT: i32 = 5006;
#[allow(dead_code)]
const MIPNODE_BRVAR: i32 = 5007;
#[allow(dead_code)]
const MIPNODE_OBJBNDC: i32 = 5008;
const MSG_STRING: i32 = 6001;
const RUNTIME: i32 = 6002;
const BARRIER_ITRCNT: i32 = 7001;
const BARRIER_PRIMOBJ: i32 = 7002;
const BARRIER_DUALOBJ: i32 = 7003;
const BARRIER_PRIMINF: i32 = 7004;
const BARRIER_DUALINF: i32 = 7005;
const BARRIER_COMPL: i32 = 7006;
/// Location where the callback called
///
/// If you want to get more information, see [official
/// manual](https://www.gurobi.com/documentation/6.5/refman/callback_codes.html).
#[derive(Debug, Clone)]
pub enum Where {
/// Periodic polling callback
Polling,
/// Currently performing presolve
PreSolve {
/// The number of columns removed by presolve to this point.
coldel: i32,
/// The number of rows removed by presolve to this point.
rowdel: i32,
/// The number of constraint senses changed by presolve to this point.
senchg: i32,
/// The number of variable bounds changed by presolve to this point.
bndchg: i32,
/// The number of coefficients changed by presolve to this point.
coecfg: i32
},
/// Currently in simplex
Simplex {
/// Current simplex iteration count.
itrcnt: f64,
/// Current simplex objective value.
objval: f64,
/// Current primal infeasibility.
priminf: f64,
/// Current dual infeasibility.
dualinf: f64,
/// Is problem current perturbed?
ispert: i32
},
/// Currently in MIP
MIP {
/// Current best objective.
objbst: f64,
/// Current best objective bound.
objbnd: f64,
/// Current explored node count.
nodcnt: f64,
/// Current count of feasible solutions found.
solcnt: f64,
/// Current count of cutting planes applied.
cutcnt: i32,
/// Current unexplored node count.
nodleft: f64,
/// Current simplex iteration count.
itrcnt: f64
},
/// Found a new MIP incumbent
MIPSol {
/// Objective value for new solution.
obj: f64,
/// Current best objective.
objbst: f64,
/// Current best objective bound.
objbnd: f64,
/// Current explored node count.
nodcnt: f64,
/// Current count of feasible solutions found.
solcnt: f64
},
/// Currently exploring a MIP node
MIPNode {
/// Optimization status of current MIP node (see the Status Code section for further information).
status: i32,
/// Current best objective.
objbst: f64,
/// Current best objective bound.
objbnd: f64,
/// Current explored node count.
nodcnt: f64,
/// Current count of feasible solutions found.
solcnt: i32
},
/// Printing a log message
Message(String),
/// Currently in barrier.
Barrier {
/// Current barrier iteration count.
itrcnt: i32,
/// Primal objective value for current barrier iterate.
primobj: f64,
/// Dual objective value for current barrier iterate.
dualobj: f64,
/// Primal infeasibility for current barrier iterate.
priminf: f64,
/// Dual infeasibility for current barrier iterate.
dualinf: f64,
/// Complementarity violation for current barrier iterate.
compl: f64
}
}
impl Into<i32> for Where {
fn into(self) -> i32 {
match self {
Where::Polling => POLLING,
Where::PreSolve { .. } => PRESOLVE,
Where::Simplex { .. } => SIMPLEX,
Where::MIP { .. } => MIP,
Where::MIPSol { .. } => MIPSOL,
Where::MIPNode { .. } => MIPNODE,
Where::Message(_) => MESSAGE,
Where::Barrier { .. } => BARRIER,
}
}
}
/// The context object for Gurobi callback.
pub struct Callback<'a> {
cbdata: *mut ffi::c_void,
where_: Where,
model: &'a Model
}
pub trait New<'a> {
fn new(cbdata: *mut ffi::c_void, where_: i32, model: &'a Model) -> Result<Callback<'a>>;
}
impl<'a> New<'a> for Callback<'a> {
fn new(cbdata: *mut ffi::c_void, where_: i32, model: &'a Model) -> Result<Callback<'a>> {
let mut callback = Callback {
cbdata: cbdata,
where_: Where::Polling,
model: model
};
let where_ = match where_ {
POLLING => Where::Polling,
PRESOLVE => {
Where::PreSolve {
coldel: try!(callback.get_int(PRESOLVE, PRE_COLDEL)),
rowdel: try!(callback.get_int(PRESOLVE, PRE_ROWDEL)),
senchg: try!(callback.get_int(PRESOLVE, PRE_SENCHG)),
bndchg: try!(callback.get_int(PRESOLVE, PRE_BNDCHG)),
coecfg: try!(callback.get_int(PRESOLVE, PRE_COECHG))
}
}
SIMPLEX => {
Where::Simplex {
itrcnt: try!(callback.get_double(SIMPLEX, SPX_ITRCNT)),
objval: try!(callback.get_double(SIMPLEX, SPX_OBJVAL)),
priminf: try!(callback.get_double(SIMPLEX, SPX_PRIMINF)),
dualinf: try!(callback.get_double(SIMPLEX, SPX_DUALINF)),
ispert: try!(callback.get_int(SIMPLEX, SPX_ISPERT))
}
}
MIP => {
Where::MIP {
objbst: try!(callback.get_double(MIP, MIP_OBJBST)),
objbnd: try!(callback.get_double(MIP, MIP_OBJBND)),
nodcnt: try!(callback.get_double(MIP, MIP_NODCNT)),
solcnt: try!(callback.get_double(MIP, MIP_SOLCNT)),
cutcnt: try!(callback.get_int(MIP, MIP_CUTCNT)),
nodleft: try!(callback.get_double(MIP, MIP_NODLFT)),
itrcnt: try!(callback.get_double(MIP, MIP_ITRCNT))
}
}
MIPSOL => {
Where::MIPSol {
obj: try!(callback.get_double(MIPSOL, MIPSOL_OBJ)),
objbst: try!(callback.get_double(MIPSOL, MIPSOL_OBJBST)),
objbnd: try!(callback.get_double(MIPSOL, MIPSOL_OBJBND)),
nodcnt: try!(callback.get_double(MIPSOL, MIPSOL_NODCNT)),
solcnt: try!(callback.get_double(MIPSOL, MIPSOL_SOLCNT))
}
}
MIPNODE => {
Where::MIPNode {
status: try!(callback.get_int(MIPNODE, MIPNODE_STATUS)),
objbst: try!(callback.get_double(MIPNODE, MIPNODE_OBJBST)),
objbnd: try!(callback.get_double(MIPNODE, MIPNODE_OBJBND)),
nodcnt: try!(callback.get_double(MIPNODE, MIPNODE_NODCNT)),
solcnt: try!(callback.get_int(MIPNODE, MIPNODE_SOLCNT))
}
}
MESSAGE => Where::Message(try!(callback.get_string(MESSAGE, MSG_STRING)).trim().to_owned()),
BARRIER => {
Where::Barrier {
itrcnt: try!(callback.get_int(BARRIER, BARRIER_ITRCNT)),
primobj: try!(callback.get_double(BARRIER, BARRIER_PRIMOBJ)),
dualobj: try!(callback.get_double(BARRIER, BARRIER_DUALOBJ)),
priminf: try!(callback.get_double(BARRIER, BARRIER_PRIMINF)),
dualinf: try!(callback.get_double(BARRIER, BARRIER_DUALINF)),
compl: try!(callback.get_double(BARRIER, BARRIER_COMPL))
}
}
_ => panic!("Invalid callback location. {}", where_)
};
callback.where_ = where_;
Ok(callback)
}
}
impl<'a> Callback<'a> {
/// Retrieve the location where the callback called.
pub fn get_where(&self) -> Where { self.where_.clone() }
/// Retrive node relaxation solution values at the current node.
pub fn get_node_rel(&self, vars: &[Var]) -> Result<Vec<f64>> {
// memo: only MIPNode && status == Optimal
self.get_double_array(MIPNODE, MIPNODE_REL).map(|buf| vars.iter().map(|v| buf[v.index() as usize]).collect_vec())
}
/// Retrieve values from the current solution vector.
pub fn get_solution(&self, vars: &[Var]) -> Result<Vec<f64>> {
self.get_double_array(MIPSOL, MIPSOL_SOL).map(|buf| vars.iter().map(|v| buf[v.index() as usize]).collect_vec())
}
/// Provide a new feasible solution for a MIP model.
pub fn set_solution(&self, vars: &[Var], solution: &[f64]) -> Result<()> {
if vars.len() != solution.len() || vars.len() < self.model.vars.len() {
return Err(Error::InconsitentDims);
}
let mut buf = vec![0.0; self.model.vars.len()];
for (v, &sol) in Zip::new((vars.iter(), solution.iter())) {
let i = v.index() as usize;
buf[i] = sol;
}
self.check_apicall(unsafe { ffi::GRBcbsolution(self.cbdata, buf.as_ptr()) })
}
/// Retrieve the elapsed solver runtime [sec].
pub fn get_runtime(&self) -> Result<f64> {
if let Where::Polling = self.get_where() {
return Err(Error::FromAPI("bad call in callback".to_owned(), 40001));
}
self.get_double(self.get_where().into(), RUNTIME)
}
/// Add a new cutting plane to the MIP model.
pub fn add_cut(&self, lhs: LinExpr, sense: ConstrSense, rhs: f64) -> Result<()> {
let (vars, coeff, offset) = lhs.into();
self.check_apicall(unsafe {
ffi::GRBcbcut(self.cbdata,
coeff.len() as ffi::c_int,
vars.as_ptr(),
coeff.as_ptr(),
sense.into(),
rhs - offset)
})
}
/// Add a new lazy constraint to the MIP model.
pub fn add_lazy(&self, lhs: LinExpr, sense: ConstrSense, rhs: f64) -> Result<()> {
let (vars, coeff, offset) = lhs.into();
self.check_apicall(unsafe {
ffi::GRBcblazy(self.cbdata,
coeff.len() as ffi::c_int,
vars.as_ptr(),
coeff.as_ptr(),
sense.into(),
rhs - offset)
})
}
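  // Illustrative sketch, not part of this crate's shipped examples: the names
  // `model`, `x`, `y`, the `optimize_with_callback` entry point and the
  // `ConstrSense::Less` variant are assumed here. A callback can watch for
  // new incumbents and lazily cut them off:
  //
  //   model.optimize_with_callback(|cb: Callback| {
  //     if let Where::MIPSol { .. } = cb.get_where() {
  //       let sol = try!(cb.get_solution(&[x.clone(), y.clone()]));
  //       if sol[0] + sol[1] > 1.0 + 1e-6 {
  //         try!(cb.add_lazy(&x + &y, ConstrSense::Less, 1.0));
  //       }
  //     }
  //     Ok(())
  //   })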
fn get_int(&self, where_: i32, what: i32) -> Result<i32> {
let mut buf = 0;
self.check_apicall(unsafe { ffi::GRBcbget(self.cbdata, where_, what, &mut buf as *mut i32 as *mut raw::c_void) }).and(Ok(buf.into()))
}
fn get_double(&self, where_: i32, what: i32) -> Result<f64> {
let mut buf = 0.0;
self.check_apicall(unsafe { ffi::GRBcbget(self.cbdata, where_, what, &mut buf as *mut f64 as *mut raw::c_void) }).and(Ok(buf.into()))
}
fn get_double_array(&self, where_: i32, what: i32) -> Result<Vec<f64>> {
let mut buf = vec![0.0; self.model.vars.len()];
self.check_apicall(unsafe { ffi::GRBcbget(self.cbdata, where_, what, transmute(buf.as_mut_ptr())) }).and(Ok(buf))
}
fn get_string(&self, where_: i32, what: i32) -> Result<String> {
let mut buf = null();
self.check_apicall(unsafe { ffi::GRBcbget(self.cbdata, where_, what, &mut buf as *mut *const i8 as *mut raw::c_void) })
.and(Ok(unsafe { util::from_c_str(buf) }))
}
fn check_apicall(&self, error: ffi::c_int) -> Result<()> {
if error != 0 {
return Err(Error::FromAPI("Callback error".to_owned(), 40000));
}
Ok(())
}
}
impl<'a> Deref for Callback<'a> {
type Target = Model;
fn deref(&self) -> &Model { self.model }
}<|fim▁end|> | const MIPNODE: i32 = 5;
const MESSAGE: i32 = 6;
const BARRIER: i32 = 7;
|
<|file_name|>validation.js<|end_file_name|><|fim▁begin|>/*
============================================
G5Framework
=============================================
validation.js
==============================================
*/
var formValidation;
(function($){
formValidation = function(){
$('.form-validation').each(function() {
var _this = this,
_errorLabel = $(_this).find('.form-errors');
//Email Regex
$.validator.addMethod('emailRegex', function(value, element) {
return this.optional(element) || /^(([A-Za-z0-9]+_+)|([A-Za-z0-9]+\-+)|([A-Za-z0-9]+\.+)|([A-Za-z0-9]+\++))*[A-Za-z0-9]+@((\w+\-+)|(\w+\.))*\w{1,63}\.[a-zA-Z]{2,6}$/i.test(value); },
'Email must contain only letters, numbers, and acceptable symbols.'
);
//Text Regex
$.validator.addMethod('textRegex', function(value, element) {
return this.optional(element) || /^([a-zA-Z '-]+)$/i.test(value); },
'No special characters allowed.'
);
//Not Equals Value
$.validator.addMethod('valueNotEquals', function(value, element, arg){
return arg != value; },
'Value must not equal arg.'
);
//Validate
$(_this).validate({
errorLabelContainer: _errorLabel,
messages: {
'name': {
required: 'Please enter a valid name'
},
'email': {
required: 'Please enter a valid email address'
}
},
rules: {
'name': {
minlength: 2,
maxlength: 22,
textRegex: true
},
'email': {
email: true,
maxlength: 30,
emailRegex: true
},
'primary-search-input': {
minlength: 2,
maxlength: 22,
textRegex: true
}
},
invalidHandler: function(form) {
//Error
// console.log('form validation: error');
},
submitHandler: function(form) {
// console.log("form submitted");
// console.log(_this);
if ($(_this).hasClass('seasonal')) {
sendFeedback();
return false;
}
// console.log("Submission");
if ($(_this).hasClass('top-ten')) {
// console.log("top ten");
}
var postData = $(_this).serialize();
//Success
// console.log('form validation: success');
// console.log(postData);
//$(_this).hide();
//$(_this).next('.thank-you').fadeIn('slow');
/*
$.ajax({
type: 'POST',
url: '/somewhere',
data: postData,
success: function() {
// console.log('ajax success');
return false;
}
});
*/
return false;
}
});
});//
if ( $('.form-validation').find('.action-link').length > 0 ) {
$('.form-validation').find('.button.action-link').on('click', function(event){
$(this).parent('form').submit();
event.preventDefault();
});
}
//Check Validation & Match Errors
$('.form-validation').submit(function(){
var phoneField = $('.phone-format'),
checkBoxes = $(this).find('input[type="checkbox"], input[type="radio"]');
//Remove dashes in phone field after submit
if ( phoneField.length > 0 ) {
phoneField.each(function() {
$(this).val( $(this).val().replace(/[^\d.]/g, '') );
});
};
//Add Errors
if ( checkBoxes.hasClass('error') ) {
var _invalid = checkBoxes.filter('.error');
_invalid.each(function(){
var _this = this,
_invalidName = $(_this).attr('name');
$('input[name="' + _invalidName + '"]').addClass('error').siblings('i').addClass('error');
});<|fim▁hole|>
});
};
}(jQuery));<|fim▁end|> |
}; |
<|file_name|>_schema.py<|end_file_name|><|fim▁begin|>#!/usr/bin/python3
# -*- Mode:Python; indent-tabs-mode:nil; tab-width:4 -*-
#
# Copyright (C) 2016 Canonical Ltd
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License version 3 as
# published by the Free Software Foundation.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
import os
import jsonschema
import yaml
from snapcraft.internal import common
class SnapcraftSchemaError(Exception):
@property<|fim▁hole|> def message(self):
return self._message
def __init__(self, message):
self._message = message
class Validator:
def __init__(self, snapcraft_yaml=None):
"""Create a validation instance for snapcraft_yaml."""
self._snapcraft = snapcraft_yaml if snapcraft_yaml else {}
self._load_schema()
@property
def schema(self):
"""Return all schema properties."""
return self._schema['properties'].copy()
@property
def part_schema(self):
"""Return part-specific schema properties."""
sub = self.schema['parts']['patternProperties']
properties = sub['^(?!plugins$)[a-z0-9][a-z0-9+-\/]*$']['properties']
return properties
def _load_schema(self):
schema_file = os.path.abspath(os.path.join(
common.get_schemadir(), 'snapcraft.yaml'))
try:
with open(schema_file) as fp:
self._schema = yaml.load(fp)
except FileNotFoundError:
raise SnapcraftSchemaError(
'snapcraft validation file is missing from installation path')
def validate(self):
format_check = jsonschema.FormatChecker()
try:
jsonschema.validate(
self._snapcraft, self._schema, format_checker=format_check)
except jsonschema.ValidationError as e:
messages = [e.message]
path = []
while e.absolute_path:
element = e.absolute_path.popleft()
# assume numbers are indices and use 'xxx[123]' notation.
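                # e.g. deque(['parts', 0, 'plugin']) renders as 'parts[0]/plugin'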
if isinstance(element, int):
path[-1] = '{}[{}]'.format(path[-1], element)
else:
path.append(str(element))
if path:
messages.insert(0, "The '{}' property does not match the "
"required schema:".format('/'.join(path)))
if e.cause:
messages.append('({})'.format(e.cause))
raise SnapcraftSchemaError(' '.join(messages))<|fim▁end|> | |
<|file_name|>mod.rs<|end_file_name|><|fim▁begin|>/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
//! Computed values.
use app_units::Au;
use euclid::size::Size2D;
use font_metrics::FontMetricsProvider;
use properties::ComputedValues;
use std::fmt;
use style_traits::ToCss;
use super::{CSSFloat, specified};
pub use cssparser::Color as CSSColor;
pub use self::image::{AngleOrCorner, EndingShape as GradientShape, Gradient, GradientKind, Image};
pub use self::image::{LengthOrKeyword, LengthOrPercentageOrKeyword};
pub use super::{Auto, Either, None_};
#[cfg(feature = "gecko")]
pub use super::specified::AlignJustifyContent;
pub use super::specified::{Angle, BorderStyle, GridLine, Time, UrlOrNone};
pub use super::specified::url::UrlExtraData;
pub use self::length::{CalcLengthOrPercentage, Length, LengthOrNumber, LengthOrPercentage, LengthOrPercentageOrAuto};
pub use self::length::{LengthOrPercentageOrAutoOrContent, LengthOrPercentageOrNone, LengthOrNone};
pub use self::position::Position;
pub mod basic_shape;
pub mod image;
pub mod length;
pub mod position;
/// A `Context` is all the data a specified value could ever need to compute
/// itself and be transformed to a computed value.
pub struct Context<'a> {
/// Whether the current element is the root element.
pub is_root_element: bool,
/// The current viewport size.
pub viewport_size: Size2D<Au>,
/// The style we're inheriting from.
pub inherited_style: &'a ComputedValues,
/// Values access through this need to be in the properties "computed
/// early": color, text-decoration, font-size, display, position, float,
/// border-*-style, outline-style, font-family, writing-mode...
pub style: ComputedValues,
/// A font metrics provider, used to access font metrics to implement
/// font-relative units.
///
/// TODO(emilio): This should be required, see #14079.
pub font_metrics_provider: Option<&'a FontMetricsProvider>,
}
impl<'a> Context<'a> {
/// Whether the current element is the root element.
pub fn is_root_element(&self) -> bool { self.is_root_element }
/// The current viewport size.
pub fn viewport_size(&self) -> Size2D<Au> { self.viewport_size }
/// The style we're inheriting from.
pub fn inherited_style(&self) -> &ComputedValues { &self.inherited_style }
/// The current style. Note that only "eager" properties should be accessed
/// from here, see the comment in the member.
pub fn style(&self) -> &ComputedValues { &self.style }
/// A mutable reference to the current style.
pub fn mutate_style(&mut self) -> &mut ComputedValues { &mut self.style }
}
/// A trait to represent the conversion between computed and specified values.
pub trait ToComputedValue {
/// The computed value type we're going to be converted to.
type ComputedValue;
/// Convert a specified value to a computed value, using itself and the data
/// inside the `Context`.
#[inline]
fn to_computed_value(&self, context: &Context) -> Self::ComputedValue;
#[inline]
/// Convert a computed value to specified value form.
///
/// This will be used for recascading during animation.
/// Such from_computed_valued values should recompute to the same value.
fn from_computed_value(computed: &Self::ComputedValue) -> Self;
}
/// A marker trait to represent that the specified value is also the computed
/// value.
pub trait ComputedValueAsSpecified {}
impl<T> ToComputedValue for T
where T: ComputedValueAsSpecified + Clone,
{
type ComputedValue = T;
#[inline]
fn to_computed_value(&self, _context: &Context) -> T {
self.clone()
}
#[inline]
fn from_computed_value(computed: &T) -> Self {
computed.clone()
}
}
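// Any type tagged with `ComputedValueAsSpecified` (for example `BorderStyle`
// a few lines below) picks up the blanket impl above, so its computed form is
// simply a clone of the specified form.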
impl ToComputedValue for specified::CSSColor {
type ComputedValue = CSSColor;
#[inline]
fn to_computed_value(&self, _context: &Context) -> CSSColor {
self.parsed
}
#[inline]
fn from_computed_value(computed: &CSSColor) -> Self {
specified::CSSColor {
parsed: *computed,
authored: None,
}
}
}
#[cfg(feature = "gecko")]
impl ComputedValueAsSpecified for specified::AlignJustifyContent {}
impl ComputedValueAsSpecified for specified::BorderStyle {}
#[derive(Debug, PartialEq, Clone, Copy)]
#[cfg_attr(feature = "servo", derive(HeapSizeOf))]
#[allow(missing_docs)]
pub struct BorderRadiusSize(pub Size2D<LengthOrPercentage>);
impl BorderRadiusSize {
#[allow(missing_docs)]
pub fn zero() -> BorderRadiusSize {
BorderRadiusSize(Size2D::new(LengthOrPercentage::Length(Au(0)), LengthOrPercentage::Length(Au(0))))
}<|fim▁hole|>
#[inline]
fn to_computed_value(&self, context: &Context) -> BorderRadiusSize {
let w = self.0.width.to_computed_value(context);
let h = self.0.height.to_computed_value(context);
BorderRadiusSize(Size2D::new(w, h))
}
#[inline]
fn from_computed_value(computed: &BorderRadiusSize) -> Self {
let w = ToComputedValue::from_computed_value(&computed.0.width);
let h = ToComputedValue::from_computed_value(&computed.0.height);
specified::BorderRadiusSize(Size2D::new(w, h))
}
}
impl ToCss for BorderRadiusSize {
fn to_css<W>(&self, dest: &mut W) -> fmt::Result where W: fmt::Write {
try!(self.0.width.to_css(dest));
try!(dest.write_str("/"));
self.0.height.to_css(dest)
}
}
#[derive(Debug, PartialEq, Clone, Copy)]
#[cfg_attr(feature = "servo", derive(HeapSizeOf))]
#[allow(missing_docs)]
pub struct Shadow {
pub offset_x: Au,
pub offset_y: Au,
pub blur_radius: Au,
pub spread_radius: Au,
pub color: CSSColor,
pub inset: bool,
}
/// A `<number>` value.
pub type Number = CSSFloat;
/// A type used for opacity.
pub type Opacity = CSSFloat;
#[derive(Clone, PartialEq, Eq, Copy, Debug)]
#[cfg_attr(feature = "servo", derive(HeapSizeOf))]
#[allow(missing_docs)]
/// A computed cliprect for clip and image-region
pub struct ClipRect {
pub top: Au,
pub right: Option<Au>,
pub bottom: Option<Au>,
pub left: Au,
}
impl ToCss for ClipRect {
fn to_css<W>(&self, dest: &mut W) -> fmt::Result where W: fmt::Write {
try!(dest.write_str("rect("));
try!(self.top.to_css(dest));
try!(dest.write_str(", "));
if let Some(right) = self.right {
try!(right.to_css(dest));
try!(dest.write_str(", "));
} else {
try!(dest.write_str("auto, "));
}
if let Some(bottom) = self.bottom {
try!(bottom.to_css(dest));
try!(dest.write_str(", "));
} else {
try!(dest.write_str("auto, "));
}
try!(self.left.to_css(dest));
dest.write_str(")")
}
}
/// rect(...) | auto
pub type ClipRectOrAuto = Either<ClipRect, Auto>;
impl ClipRectOrAuto {
/// Return an auto (default for clip-rect and image-region) value
pub fn auto() -> Self {
Either::Second(Auto)
}
/// Check if it is auto
pub fn is_auto(&self) -> bool {
match *self {
Either::Second(_) => true,
_ => false
}
}
}<|fim▁end|> | }
impl ToComputedValue for specified::BorderRadiusSize {
type ComputedValue = BorderRadiusSize; |
<|file_name|>prettydate.py<|end_file_name|><|fim▁begin|>def pretty_date(time=False):
"""
    Get a datetime object or an int() Epoch timestamp and return a
pretty string like 'an hour ago', 'Yesterday', '3 months ago',
'just now', etc
"""
from datetime import datetime
now = datetime.now()<|fim▁hole|> diff = now - time
elif not time:
diff = now - now
second_diff = diff.seconds
day_diff = diff.days
if day_diff < 0:
return ''
if day_diff == 0:
if second_diff < 10:
return "just now"
if second_diff < 60:
return str(second_diff) + " seconds ago"
if second_diff < 120:
return "a minute ago"
if second_diff < 3600:
            return str(second_diff / 60) + " minutes ago"
if second_diff < 7200:
return "an hour ago"
if second_diff < 86400:
            return str(second_diff / 3600) + " hours ago"
if day_diff == 1:
return "Yesterday"
if day_diff < 7:
return str(day_diff) + " days ago"
if day_diff < 31:
return str(day_diff/7) + " weeks ago"
if day_diff < 365:
return str(day_diff/30) + " months ago"
return str(day_diff/365) + " years ago"<|fim▁end|> | if type(time) is int:
diff = now - datetime.fromtimestamp(time)
elif isinstance(time,datetime): |
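# Illustrative sketch (Python 2 integer division assumed, matching the code
# above; the `time` module and `datetime.timedelta` imports are assumed):
#   pretty_date(datetime.now())                      -> 'just now'
#   pretty_date(int(time.time()) - 4000)             -> 'an hour ago'
#   pretty_date(datetime.now() - timedelta(days=2))  -> '2 days ago'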
<|file_name|>set_file_cache_expired_config.go<|end_file_name|><|fim▁begin|>package cdn
//Licensed under the Apache License, Version 2.0 (the "License");
//you may not use this file except in compliance with the License.
//You may obtain a copy of the License at
//
//http://www.apache.org/licenses/LICENSE-2.0
//
//Unless required by applicable law or agreed to in writing, software
//distributed under the License is distributed on an "AS IS" BASIS,
//WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
//See the License for the specific language governing permissions and
//limitations under the License.
//
// Code generated by Alibaba Cloud SDK Code Generator.
// Changes may cause incorrect behavior and will be lost if the code is regenerated.
import (
"github.com/aliyun/alibaba-cloud-sdk-go/sdk/requests"
"github.com/aliyun/alibaba-cloud-sdk-go/sdk/responses"
)
// SetFileCacheExpiredConfig invokes the cdn.SetFileCacheExpiredConfig API synchronously
// api document: https://help.aliyun.com/api/cdn/setfilecacheexpiredconfig.html
func (client *Client) SetFileCacheExpiredConfig(request *SetFileCacheExpiredConfigRequest) (response *SetFileCacheExpiredConfigResponse, err error) {
response = CreateSetFileCacheExpiredConfigResponse()
err = client.DoAction(request, response)
return
}
// SetFileCacheExpiredConfigWithChan invokes the cdn.SetFileCacheExpiredConfig API asynchronously
// api document: https://help.aliyun.com/api/cdn/setfilecacheexpiredconfig.html
// asynchronous document: https://help.aliyun.com/document_detail/66220.html
func (client *Client) SetFileCacheExpiredConfigWithChan(request *SetFileCacheExpiredConfigRequest) (<-chan *SetFileCacheExpiredConfigResponse, <-chan error) {
responseChan := make(chan *SetFileCacheExpiredConfigResponse, 1)
errChan := make(chan error, 1)
err := client.AddAsyncTask(func() {
defer close(responseChan)
defer close(errChan)
response, err := client.SetFileCacheExpiredConfig(request)
if err != nil {
errChan <- err
} else {
responseChan <- response
}
})
if err != nil {
errChan <- err
close(responseChan)
close(errChan)
}
return responseChan, errChan
}
// SetFileCacheExpiredConfigWithCallback invokes the cdn.SetFileCacheExpiredConfig API asynchronously
// api document: https://help.aliyun.com/api/cdn/setfilecacheexpiredconfig.html
// asynchronous document: https://help.aliyun.com/document_detail/66220.html
func (client *Client) SetFileCacheExpiredConfigWithCallback(request *SetFileCacheExpiredConfigRequest, callback func(response *SetFileCacheExpiredConfigResponse, err error)) <-chan int {
result := make(chan int, 1)
err := client.AddAsyncTask(func() {
var response *SetFileCacheExpiredConfigResponse
var err error
defer close(result)
response, err = client.SetFileCacheExpiredConfig(request)
callback(response, err)
result <- 1
})
if err != nil {
defer close(result)
callback(nil, err)
result <- 0
}
return result
}
// SetFileCacheExpiredConfigRequest is the request struct for api SetFileCacheExpiredConfig
type SetFileCacheExpiredConfigRequest struct {
*requests.RpcRequest
OwnerId requests.Integer `position:"Query" name:"OwnerId"`
SecurityToken string `position:"Query" name:"SecurityToken"`
DomainName string `position:"Query" name:"DomainName"`
CacheContent string `position:"Query" name:"CacheContent"`
TTL string `position:"Query" name:"TTL"`
Weight string `position:"Query" name:"Weight"`
}
// SetFileCacheExpiredConfigResponse is the response struct for api SetFileCacheExpiredConfig
type SetFileCacheExpiredConfigResponse struct {
*responses.BaseResponse
RequestId string `json:"RequestId" xml:"RequestId"`
}
// CreateSetFileCacheExpiredConfigRequest creates a request to invoke SetFileCacheExpiredConfig API
func CreateSetFileCacheExpiredConfigRequest() (request *SetFileCacheExpiredConfigRequest) {
request = &SetFileCacheExpiredConfigRequest{
RpcRequest: &requests.RpcRequest{},
}
request.InitWithApiInfo("Cdn", "2014-11-11", "SetFileCacheExpiredConfig", "", "")
return
}
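// Illustrative usage sketch (placeholder values; a configured cdn Client named
// `client` is assumed):
//
//	request := CreateSetFileCacheExpiredConfigRequest()
//	request.DomainName = "example.com"
//	request.CacheContent = "/images/"
//	request.TTL = "3600"
//	response, err := client.SetFileCacheExpiredConfig(request)
//	if err == nil {
//		fmt.Println(response.RequestId)
//	}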
// CreateSetFileCacheExpiredConfigResponse creates a response to parse from SetFileCacheExpiredConfig response
func CreateSetFileCacheExpiredConfigResponse() (response *SetFileCacheExpiredConfigResponse) {<|fim▁hole|> }
return
}<|fim▁end|> | response = &SetFileCacheExpiredConfigResponse{
BaseResponse: &responses.BaseResponse{}, |
<|file_name|>Network.py<|end_file_name|><|fim▁begin|>import os
import re
import netifaces as ni
from socket import *
from Components.Console import Console
from Components.PluginComponent import plugins
from Plugins.Plugin import PluginDescriptor
from boxbranding import getBoxType
class Network:
def __init__(self):
self.ifaces = {}
self.configuredNetworkAdapters = []
self.NetworkState = 0
self.DnsState = 0
self.nameservers = []
self.ethtool_bin = "/usr/sbin/ethtool"
self.console = Console()
self.linkConsole = Console()
self.restartConsole = Console()
self.deactivateInterfaceConsole = Console()
self.activateInterfaceConsole = Console()
self.resetNetworkConsole = Console()
self.dnsConsole = Console()
self.pingConsole = Console()
self.config_ready = None
self.friendlyNames = {}
self.lan_interfaces = []
self.wlan_interfaces = []
self.remoteRootFS = None
self.getInterfaces()
def onRemoteRootFS(self):
if self.remoteRootFS is None:
import Harddisk
for parts in Harddisk.getProcMounts():
if parts[1] == '/' and parts[2] == 'nfs':
self.remoteRootFS = True
break
else:
self.remoteRootFS = False
return self.remoteRootFS
def isBlacklisted(self, iface):
return iface in ('lo', 'wifi0', 'wmaster0', 'sit0', 'tun0', 'sys0', 'p2p0')
def getInterfaces(self, callback=None):
self.configuredInterfaces = []
for device in self.getInstalledAdapters():
self.getAddrInet(device, callback)
# helper function
def regExpMatch(self, pattern, string):
if string is None:
return None
try:
return pattern.search(string).group()
except AttributeError:
return None
	# helper function to convert IPs from a string to a list of ints
def convertIP(self, ip):
return [int(n) for n in ip.split('.')]
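	# e.g. convertIP('192.168.1.10') -> [192, 168, 1, 10]; dotted-quad IPv4
	# strings only, no validation is attempted here.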
def getAddrInet(self, iface, callback):
data = {'up': False, 'dhcp': False, 'preup': False, 'predown': False}
try:
data['up'] = int(open('/sys/class/net/%s/flags' % iface).read().strip(), 16) & 1 == 1
if data['up']:
self.configuredInterfaces.append(iface)
nit = ni.ifaddresses(iface)
data['ip'] = self.convertIP(nit[ni.AF_INET][0]['addr']) # ipv4
data['netmask'] = self.convertIP(nit[ni.AF_INET][0]['netmask'])
data['bcast'] = self.convertIP(nit[ni.AF_INET][0]['broadcast'])
data['mac'] = nit[ni.AF_LINK][0]['addr'] # mac
data['gateway'] = self.convertIP(ni.gateways()['default'][ni.AF_INET][0]) # default gw
except:
data['dhcp'] = True
data['ip'] = [0, 0, 0, 0]
data['netmask'] = [0, 0, 0, 0]
data['gateway'] = [0, 0, 0, 0]
self.ifaces[iface] = data
self.loadNetworkConfig(iface, callback)
def writeNetworkConfig(self):
self.configuredInterfaces = []
fp = file('/etc/network/interfaces', 'w')
fp.write("# automatically generated by enigma2\n# do NOT change manually!\n\n")
fp.write("auto lo\n")
fp.write("iface lo inet loopback\n\n")
for ifacename, iface in self.ifaces.items():
if iface['up']:
fp.write("auto " + ifacename + "\n")
self.configuredInterfaces.append(ifacename)
if iface['dhcp']:
fp.write("iface " + ifacename + " inet dhcp\n")
fp.write("udhcpc_opts -T1 -t9\n")
if not iface['dhcp']:
fp.write("iface " + ifacename + " inet static\n")
if 'ip' in iface:
fp.write(" address %d.%d.%d.%d\n" % tuple(iface['ip']))
fp.write(" netmask %d.%d.%d.%d\n" % tuple(iface['netmask']))
if 'gateway' in iface:
fp.write(" gateway %d.%d.%d.%d\n" % tuple(iface['gateway']))
if "configStrings" in iface:
fp.write(iface["configStrings"])
if iface["preup"] is not False and "configStrings" not in iface:
fp.write(iface["preup"])
if iface["predown"] is not False and "configStrings" not in iface:
fp.write(iface["predown"])
fp.write("\n")
fp.close()
self.configuredNetworkAdapters = self.configuredInterfaces
self.writeNameserverConfig()
def writeNameserverConfig(self):
fp = file('/etc/resolv.conf', 'w')
for nameserver in self.nameservers:
fp.write("nameserver %d.%d.%d.%d\n" % tuple(nameserver))
fp.close()
def loadNetworkConfig(self, iface, callback=None):
interfaces = []
# parse the interfaces-file
try:
fp = file('/etc/network/interfaces', 'r')
interfaces = fp.readlines()
fp.close()
except:
print "[Network.py] interfaces - opening failed"
ifaces = {}<|fim▁hole|> for i in interfaces:
split = i.strip().split(' ')
if split[0] == "iface":
currif = split[1]
ifaces[currif] = {}
if len(split) == 4 and split[3] == "dhcp":
ifaces[currif]["dhcp"] = True
else:
ifaces[currif]["dhcp"] = False
if currif == iface: #read information only for available interfaces
if split[0] == "address":
ifaces[currif]["address"] = map(int, split[1].split('.'))
if "ip" in self.ifaces[currif]:
if self.ifaces[currif]["ip"] != ifaces[currif]["address"] and ifaces[currif]["dhcp"] == False:
self.ifaces[currif]["ip"] = map(int, split[1].split('.'))
if split[0] == "netmask":
ifaces[currif]["netmask"] = map(int, split[1].split('.'))
if "netmask" in self.ifaces[currif]:
if self.ifaces[currif]["netmask"] != ifaces[currif]["netmask"] and ifaces[currif]["dhcp"] == False:
self.ifaces[currif]["netmask"] = map(int, split[1].split('.'))
if split[0] == "gateway":
ifaces[currif]["gateway"] = map(int, split[1].split('.'))
if "gateway" in self.ifaces[currif]:
if self.ifaces[currif]["gateway"] != ifaces[currif]["gateway"] and ifaces[currif]["dhcp"] == False:
self.ifaces[currif]["gateway"] = map(int, split[1].split('.'))
if split[0] == "pre-up":
if "preup" in self.ifaces[currif]:
self.ifaces[currif]["preup"] = i
if split[0] in ("pre-down", "post-down"):
if "predown" in self.ifaces[currif]:
self.ifaces[currif]["predown"] = i
for ifacename, iface in ifaces.items():
if ifacename in self.ifaces:
self.ifaces[ifacename]["dhcp"] = iface["dhcp"]
if not self.console.appContainers:
# save configured interfacelist
self.configuredNetworkAdapters = self.configuredInterfaces
# load ns only once
self.loadNameserverConfig()
print "read configured interface:", ifaces
print "self.ifaces after loading:", self.ifaces
self.config_ready = True
self.msgPlugins()
if callback is not None:
callback(True)
def loadNameserverConfig(self):
ipRegexp = "[0-9]{1,3}\.[0-9]{1,3}\.[0-9]{1,3}\.[0-9]{1,3}"
nameserverPattern = re.compile("nameserver +" + ipRegexp)
ipPattern = re.compile(ipRegexp)
resolv = []
try:
fp = file('/etc/resolv.conf', 'r')
resolv = fp.readlines()
fp.close()
self.nameservers = []
except:
print "[Network.py] resolv.conf - opening failed"
for line in resolv:
if self.regExpMatch(nameserverPattern, line) is not None:
ip = self.regExpMatch(ipPattern, line)
if ip:
self.nameservers.append(self.convertIP(ip))
print "nameservers:", self.nameservers
def getInstalledAdapters(self):
return [x for x in os.listdir('/sys/class/net') if not self.isBlacklisted(x)]
def getConfiguredAdapters(self):
return self.configuredNetworkAdapters
def getNumberOfAdapters(self):
return len(self.ifaces)
def getFriendlyAdapterName(self, x):
if x in self.friendlyNames.keys():
return self.friendlyNames.get(x, x)
self.friendlyNames[x] = self.getFriendlyAdapterNaming(x)
return self.friendlyNames.get(x, x) # when we have no friendly name, use adapter name
def getFriendlyAdapterNaming(self, iface):
name = None
if self.isWirelessInterface(iface):
if iface not in self.wlan_interfaces:
name = _("WLAN connection")
if len(self.wlan_interfaces):
name += " " + str(len(self.wlan_interfaces) + 1)
self.wlan_interfaces.append(iface)
else:
if iface not in self.lan_interfaces:
if iface == "eth1":
name = _("VLAN connection")
else:
name = _("LAN connection")
if len(self.lan_interfaces) and not iface == "eth1":
name += " " + str(len(self.lan_interfaces) + 1)
self.lan_interfaces.append(iface)
return name
def getFriendlyAdapterDescription(self, iface):
if not self.isWirelessInterface(iface):
return _('Ethernet network interface')
moduledir = self.getWlanModuleDir(iface)
if moduledir:
name = os.path.basename(os.path.realpath(moduledir))
if name.startswith('ath') or name.startswith('carl'):
name = 'Atheros'
elif name.startswith('rt2') or name.startswith('rt3') or name.startswith('rt5') or name.startswith('rt6') or name.startswith('rt7'):
name = 'Ralink'
elif name.startswith('zd'):
name = 'Zydas'
elif name.startswith('rtl') or name.startswith('r8'):
name = 'Realtek'
elif name.startswith('smsc'):
name = 'SMSC'
elif name.startswith('peg'):
name = 'Pegasus'
elif name.startswith('rn'):
name = 'RNDIS'
elif name.startswith('mw') or name.startswith('libertas'):
name = 'Marvel'
elif name.startswith('p5'):
name = 'Prism'
elif name.startswith('as') or name.startswith('ax'):
name = 'ASIX'
elif name.startswith('dm'):
name = 'Davicom'
elif name.startswith('mcs'):
name = 'MosChip'
elif name.startswith('at'):
name = 'Atmel'
elif name.startswith('iwm'):
name = 'Intel'
elif name.startswith('brcm') or name.startswith('bcm'):
name = 'Broadcom'
elif os.path.isdir('/tmp/bcm/' + iface):
name = 'Broadcom'
else:
name = _('Unknown')
return name + ' ' + _('wireless network interface')
def getAdapterName(self, iface):
return iface
def getAdapterList(self):
return self.ifaces.keys()
def getAdapterAttribute(self, iface, attribute):
return self.ifaces.get(iface, {}).get(attribute)
def setAdapterAttribute(self, iface, attribute, value):
print "setting for adapter", iface, "attribute", attribute, " to value", value
if iface in self.ifaces:
self.ifaces[iface][attribute] = value
def removeAdapterAttribute(self, iface, attribute):
if iface in self.ifaces and attribute in self.ifaces[iface]:
del self.ifaces[iface][attribute]
def getNameserverList(self):
if len(self.nameservers) == 0:
return [[0, 0, 0, 0], [0, 0, 0, 0]]
else:
return self.nameservers
def clearNameservers(self):
self.nameservers = []
def addNameserver(self, nameserver):
if nameserver not in self.nameservers:
self.nameservers.append(nameserver)
def removeNameserver(self, nameserver):
if nameserver in self.nameservers:
self.nameservers.remove(nameserver)
def changeNameserver(self, oldnameserver, newnameserver):
if oldnameserver in self.nameservers:
for i in range(len(self.nameservers)):
if self.nameservers[i] == oldnameserver:
self.nameservers[i] = newnameserver
def resetNetworkConfig(self, mode='lan', callback=None):
self.commands = []
self.commands.append("/etc/init.d/avahi-daemon stop")
for iface in self.ifaces.keys():
if iface != 'eth0' or not self.onRemoteRootFS():
self.commands.append("/sbin/ip addr flush dev " + iface + " scope global")
self.commands.append("/etc/init.d/networking stop")
self.commands.append("killall -9 udhcpc")
self.commands.append("rm /var/run/udhcpc*")
self.resetNetworkConsole.eBatch(self.commands, self.resetNetworkFinishedCB, [mode, callback], debug=True)
def resetNetworkFinishedCB(self, extra_args):
(mode, callback) = extra_args
if not self.resetNetworkConsole.appContainers:
self.writeDefaultNetworkConfig(mode, callback)
def writeDefaultNetworkConfig(self, mode='lan', callback=None):
fp = file('/etc/network/interfaces', 'w')
fp.write("# automatically generated by enigma2\n# do NOT change manually!\n\n")
fp.write("auto lo\n")
fp.write("iface lo inet loopback\n\n")
if mode == 'wlan':
fp.write("auto wlan0\n")
fp.write("iface wlan0 inet dhcp\n")
if mode == 'wlan-mpci':
fp.write("auto ath0\n")
fp.write("iface ath0 inet dhcp\n")
if mode == 'lan':
fp.write("auto eth0\n")
fp.write("iface eth0 inet dhcp\n")
fp.write("\n")
fp.close()
self.commands = []
if mode == 'wlan':
self.commands.append("/sbin/ifconfig eth0 down")
self.commands.append("/sbin/ifconfig ath0 down")
self.commands.append("/sbin/ifconfig wlan0 up")
if mode == 'wlan-mpci':
self.commands.append("/sbin/ifconfig eth0 down")
self.commands.append("/sbin/ifconfig wlan0 down")
self.commands.append("/sbin/ifconfig ath0 up")
if mode == 'lan':
self.commands.append("/sbin/ifconfig eth0 up")
self.commands.append("/sbin/ifconfig wlan0 down")
self.commands.append("/sbin/ifconfig ath0 down")
self.commands.append("/etc/init.d/avahi-daemon start")
self.resetNetworkConsole.eBatch(self.commands, self.resetNetworkFinished, [mode, callback], debug=True)
def resetNetworkFinished(self, extra_args):
(mode, callback) = extra_args
if not self.resetNetworkConsole.appContainers:
if callback is not None:
callback(True, mode)
def checkNetworkState(self, statecallback):
self.NetworkState = 0
self.pingConsole = Console()
for server in ("www.openpli.org", "www.google.nl", "www.google.com"):
self.pingConsole.ePopen(("/bin/ping", "/bin/ping", "-c", "1", server), self.checkNetworkStateFinished, statecallback)
def checkNetworkStateFinished(self, result, retval, extra_args):
(statecallback) = extra_args
if self.pingConsole is not None:
if retval == 0:
self.pingConsole = None
statecallback(self.NetworkState)
else:
self.NetworkState += 1
if not self.pingConsole.appContainers:
statecallback(self.NetworkState)
def restartNetwork(self, callback=None):
self.config_ready = False
self.msgPlugins()
self.commands = []
self.commands.append("/etc/init.d/avahi-daemon stop")
for iface in self.ifaces.keys():
if iface != 'eth0' or not self.onRemoteRootFS():
self.commands.append(("/sbin/ifdown", "/sbin/ifdown", iface))
self.commands.append("/sbin/ip addr flush dev " + iface + " scope global")
self.commands.append("/etc/init.d/networking stop")
self.commands.append("killall -9 udhcpc")
self.commands.append("rm /var/run/udhcpc*")
self.commands.append("/etc/init.d/networking start")
self.commands.append("/etc/init.d/avahi-daemon start")
self.restartConsole.eBatch(self.commands, self.restartNetworkFinished, callback, debug=True)
def restartNetworkFinished(self, extra_args):
(callback) = extra_args
if callback is not None:
callback(True)
def getLinkState(self, iface, callback):
self.linkConsole.ePopen((self.ethtool_bin, self.ethtool_bin, iface), self.getLinkStateFinished, callback)
def getLinkStateFinished(self, result, retval, extra_args):
(callback) = extra_args
if not self.linkConsole.appContainers:
callback(result)
def stopPingConsole(self):
if self.pingConsole is not None:
self.pingConsole.killAll()
def stopLinkStateConsole(self):
self.linkConsole.killAll()
def stopDNSConsole(self):
if self.dnsConsole is not None:
self.dnsConsole.killAll()
def stopRestartConsole(self):
self.restartConsole.killAll()
def stopGetInterfacesConsole(self):
self.console.killAll()
def stopDeactivateInterfaceConsole(self):
self.deactivateInterfaceConsole.killAll()
def stopActivateInterfaceConsole(self):
self.activateInterfaceConsole.killAll()
def checkforInterface(self, iface):
if self.getAdapterAttribute(iface, 'up') is True:
return True
else:
ret = os.system("ifconfig " + iface + " up")
os.system("ifconfig " + iface + " down")
if ret == 0:
return True
else:
return False
def checkDNSLookup(self, statecallback):
self.DnsState = 0
self.dnsConsole = Console()
for server in ("www.openpli.org", "www.google.nl", "www.google.com"):
self.dnsConsole.ePopen(("/usr/bin/nslookup", "/usr/bin/nslookup", server), self.checkDNSLookupFinished, statecallback)
def checkDNSLookupFinished(self, result, retval, extra_args):
(statecallback) = extra_args
if self.dnsConsole is not None:
if retval == 0:
self.dnsConsole = None
statecallback(self.DnsState)
else:
self.DnsState += 1
if not self.dnsConsole.appContainers:
statecallback(self.DnsState)
def deactivateInterface(self, ifaces, callback=None):
self.config_ready = False
self.msgPlugins()
commands = []
def buildCommands(iface):
commands.append(("/sbin/ifdown", "/sbin/ifdown", "-f", iface))
commands.append(("/sbin/ip", "/sbin/ip", "addr", "flush", "dev", iface, "scope", "global"))
#wpa_supplicant sometimes doesn't quit properly on SIGTERM
if os.path.exists('/var/run/wpa_supplicant/' + iface):
commands.append("wpa_cli -i" + iface + " terminate")
if isinstance(ifaces, (list, tuple)):
for iface in ifaces:
if iface != 'eth0' or not self.onRemoteRootFS():
buildCommands(iface)
else:
if ifaces == 'eth0' and self.onRemoteRootFS():
if callback is not None:
callback(True)
return
buildCommands(ifaces)
self.deactivateInterfaceConsole.eBatch(commands, self.deactivateInterfaceFinished, (ifaces, callback), debug=True)
def deactivateInterfaceFinished(self, extra_args):
(ifaces, callback) = extra_args
if not self.deactivateInterfaceConsole.appContainers:
if callback is not None:
callback(True)
def activateInterface(self, iface, callback=None):
if self.config_ready:
self.config_ready = False
self.msgPlugins()
if iface == 'eth0' and self.onRemoteRootFS():
if callback is not None:
callback(True)
return
commands = []
commands.append(("/sbin/ifup", "/sbin/ifup", iface))
self.activateInterfaceConsole.eBatch(commands, self.activateInterfaceFinished, callback, debug=True)
def activateInterfaceFinished(self, extra_args):
callback = extra_args
if not self.activateInterfaceConsole.appContainers:
if callback is not None:
callback(True)
def sysfsPath(self, iface):
return '/sys/class/net/' + iface
def isWirelessInterface(self, iface):
if iface in self.wlan_interfaces:
return True
if os.path.isdir(self.sysfsPath(iface) + '/wireless'):
return True
# r871x_usb_drv on kernel 2.6.12 is not identifiable over /sys/class/net/'ifacename'/wireless so look also inside /proc/net/wireless
device = re.compile('[a-z]{2,}[0-9]*:')
ifnames = []
fp = open('/proc/net/wireless', 'r')
for line in fp:
try:
ifnames.append(device.search(line).group()[:-1])
except AttributeError:
pass
if iface in ifnames:
return True
return False
def getWlanModuleDir(self, iface=None):
devicedir = self.sysfsPath(iface) + '/device'
if not os.path.isdir(devicedir):
return None
moduledir = devicedir + '/driver/module'
if os.path.isdir(moduledir):
return moduledir
# identification is not possible over default moduledir
for x in os.listdir(devicedir):
# rt3070 on kernel 2.6.18 registers wireless devices as usb_device (e.g. 1-1.3:1.0) and identification is only possible over /sys/class/net/'ifacename'/device/1-xxx
if x.startswith("1-"):
moduledir = devicedir + '/' + x + '/driver/module'
if os.path.isdir(moduledir):
return moduledir
# rt73, zd1211b, r871x_usb_drv on kernel 2.6.12 can be identified over /sys/class/net/'ifacename'/device/driver, so look also here
moduledir = devicedir + '/driver'
if os.path.isdir(moduledir):
return moduledir
return None
def detectWlanModule(self, iface=None):
if not self.isWirelessInterface(iface):
return None
devicedir = self.sysfsPath(iface) + '/device'
if os.path.isdir(devicedir + '/ieee80211'):
return 'nl80211'
moduledir = self.getWlanModuleDir(iface)
if moduledir:
module = os.path.basename(os.path.realpath(moduledir))
if module in ('ath_pci', 'ath5k'):
return 'madwifi'
if module in ('rt73',):
return 'ralink'
if module == 'zd1211b':
return 'zydas'
if module == 'brcm-systemport':
return 'brcm-wl'
return 'wext'
def calc_netmask(self, nmask):
from struct import pack
from socket import inet_ntoa
mask = 1L << 31
xnet = (1L << 32) - 1
cidr_range = range(0, 32)
cidr = long(nmask)
if cidr not in cidr_range:
print 'cidr invalid: %d' % cidr
return None
else:
nm = ((1L << cidr) - 1) << (32 - cidr)
netmask = str(inet_ntoa(pack('>L', nm)))
return netmask
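# Worked example (illustrative): calc_netmask(24) computes
# nm = ((1 << 24) - 1) << 8 == 0xFFFFFF00, which inet_ntoa renders
# as '255.255.255.0'; calc_netmask(16) likewise gives '255.255.0.0'.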
def msgPlugins(self):
if self.config_ready is not None:
for p in plugins.getPlugins(PluginDescriptor.WHERE_NETWORKCONFIG_READ):
p(reason=self.config_ready)
def hotplug(self, event):
interface = event['INTERFACE']
if self.isBlacklisted(interface):
return
action = event['ACTION']
if action == "add":
print "[Network] Add new interface:", interface
self.getAddrInet(interface, None)
elif action == "remove":
print "[Network] Removed interface:", interface
try:
del self.ifaces[interface]
except KeyError:
pass
iNetwork = Network()
def InitNetwork():
pass<|fim▁end|> | currif = "" |
<|file_name|>writer.rs<|end_file_name|><|fim▁begin|>use std::fs::OpenOptions;
use std::io::Result;
use std::path::Path;
use byteorder::{ BigEndian, LittleEndian, WriteBytesExt };
pub fn write_pcm(filename: &str, samples: Vec<i16>) -> Result<()> {
let path = Path::new(filename);
let mut f = try!(OpenOptions::new().write(true).truncate(true).create(true).open(&path));
for &sample in samples.iter() {
try!(f.write_i16::<LittleEndian>(sample));
}
Ok(())
}
// See: https://ccrma.stanford.edu/courses/422/projects/WaveFormat/
pub fn write_wav(filename: &str, sample_rate: usize, samples: Vec<i16>) -> Result<()> {
let path = Path::new(filename);
let mut f = try!(OpenOptions::new().write(true).truncate(true).create(true).open(&path));
// Some WAV header fields<|fim▁hole|> let subchunk_2_size = samples.len() * channels * bit_depth / 8;
let chunk_size = 36 + subchunk_2_size as i32;
let byte_rate = (sample_rate * channels * bit_depth / 8) as i32;
let block_align = (channels * bit_depth / 8) as i16;
try!(f.write_i32::<BigEndian>(0x52494646)); // ChunkID, RIFF
try!(f.write_i32::<LittleEndian>(chunk_size)); // ChunkSize
try!(f.write_i32::<BigEndian>(0x57415645)); // Format, WAVE
try!(f.write_i32::<BigEndian>(0x666d7420)); // Subchunk1ID, fmt
try!(f.write_i32::<LittleEndian>(16)); // Subchunk1Size, 16 for PCM
try!(f.write_i16::<LittleEndian>(1)); // AudioFormat, PCM = 1 (linear quantization)
try!(f.write_i16::<LittleEndian>(channels as i16)); // NumChannels
try!(f.write_i32::<LittleEndian>(sample_rate as i32)); // SampleRate
try!(f.write_i32::<LittleEndian>(byte_rate)); // ByteRate
try!(f.write_i16::<LittleEndian>(block_align)); // BlockAlign
try!(f.write_i16::<LittleEndian>(bit_depth as i16)); // BitsPerSample
try!(f.write_i32::<BigEndian>(0x64617461)); // Subchunk2ID, data
try!(f.write_i32::<LittleEndian>(subchunk_2_size as i32)); // Subchunk2Size, number of bytes in the data
for sample in samples.iter() {
try!(f.write_i16::<LittleEndian>(*sample))
}
Ok(())
}<|fim▁end|> | let channels = 1;
let bit_depth = 16; |
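A quick sanity check on the header arithmetic in write_wav above: the derived fields reduce to simple products of the format parameters. A minimal Python sketch of the same expressions (function name and defaults are illustrative, not from the source):

# Mirrors the derived-field expressions in write_wav (mono 16-bit PCM defaults).
def wav_header_fields(sample_rate, n_samples, channels=1, bit_depth=16):
    subchunk_2_size = n_samples * channels * bit_depth // 8   # bytes of sample data
    return {
        'ChunkSize': 36 + subchunk_2_size,                    # RIFF chunk size
        'ByteRate': sample_rate * channels * bit_depth // 8,  # bytes per second
        'BlockAlign': channels * bit_depth // 8,              # bytes per sample frame
        'Subchunk2Size': subchunk_2_size,
    }

print(wav_header_fields(44100, 44100))  # one second of mono audio at 44.1 kHz
# {'ChunkSize': 88236, 'ByteRate': 88200, 'BlockAlign': 2, 'Subchunk2Size': 88200}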
<|file_name|>mssmUtils.cpp<|end_file_name|><|fim▁begin|>#include "mssmUtils.h"
#include "softsusy.h"
#include <iostream>
namespace softsusy {
double sw2 = 1.0 - sqr(MW / MZ),
gnuL = 0.5,
guL = 0.5 - 2.0 * sw2 / 3.0,
gdL = -0.5 + sw2 / 3.0,
geL = -0.5 + sw2,
guR = 2.0 * sw2 / 3.0,
gdR = -sw2 / 3.0,
geR = -sw2,
yuL = 1.0 / 3.0,
yuR = -4.0 / 3.0,
ydL = 1.0 / 3.0,
ydR = 2.0 / 3.0,
yeL = -1.0,
yeR = 2.0,
ynuL = -1.0;
void generalBcs(MssmSoftsusy & m, const DoubleVector & inputParameters) {
MssmSusy s; SoftParsMssm r;
double m3sq = m.displayM3Squared();
s = m.displaySusy();
r.set(inputParameters);
r.setM3Squared(m3sq);
m.setSoftPars(r);
m.setSusy(s);
return;
}
/// This one doesn't overwrite mh1sq or mh2sq at the high scale
void generalBcs2(MssmSoftsusy & m, const DoubleVector & inputParameters) {
MssmSusy s; SoftParsMssm r;
double mh1sq = m.displayMh1Squared();
double mh2sq = m.displayMh2Squared();
double m3sq = m.displayM3Squared();
s = m.displaySusy();
r.set(inputParameters);
r.setMh1Squared(mh1sq);
r.setMh2Squared(mh2sq);
r.setM3Squared(m3sq);
m.setSoftPars(r);
m.setSusy(s);
return;
}
void extendedSugraBcs(MssmSoftsusy & m, const DoubleVector & inputParameters) {
int i;
for (i=1; i<=3; i++) m.setGauginoMass(i, inputParameters.display(i));
if (inputParameters.display(25) > 1. && m.displaySetTbAtMX())
m.setTanb(inputParameters.display(25));
m.setTrilinearElement(UA, 1, 1, m.displayYukawaElement(YU, 1, 1) *
inputParameters.display(11));
m.setTrilinearElement(UA, 2, 2, m.displayYukawaElement(YU, 2, 2) *
inputParameters.display(11));
m.setTrilinearElement(UA, 3, 3, m.displayYukawaElement(YU, 3, 3) *
inputParameters.display(11));
m.setTrilinearElement(DA, 1, 1, m.displayYukawaElement(YD, 1, 1) *
inputParameters.display(12));
m.setTrilinearElement(DA, 2, 2, m.displayYukawaElement(YD, 2, 2) *
inputParameters.display(12));
m.setTrilinearElement(DA, 3, 3, m.displayYukawaElement(YD, 3, 3) *
inputParameters.display(12));
m.setTrilinearElement(EA, 1, 1, m.displayYukawaElement(YE, 1, 1) *
inputParameters.display(13));
m.setTrilinearElement(EA, 2, 2, m.displayYukawaElement(YE, 2, 2) *
inputParameters.display(13));
m.setTrilinearElement(EA, 3, 3, m.displayYukawaElement(YE, 3, 3) *
inputParameters.display(13));
m.setSoftMassElement(mLl, 1, 1, signedSqr(inputParameters.display(31)));
m.setSoftMassElement(mLl, 2, 2, signedSqr(inputParameters.display(32)));
m.setSoftMassElement(mLl, 3, 3, signedSqr(inputParameters.display(33)));
m.setSoftMassElement(mEr, 1, 1, signedSqr(inputParameters.display(34)));
m.setSoftMassElement(mEr, 2, 2, signedSqr(inputParameters.display(35)));
m.setSoftMassElement(mEr, 3, 3, signedSqr(inputParameters.display(36)));
m.setSoftMassElement(mQl, 1, 1, signedSqr(inputParameters.display(41)));
m.setSoftMassElement(mQl, 2, 2, signedSqr(inputParameters.display(42)));
m.setSoftMassElement(mQl, 3, 3, signedSqr(inputParameters.display(43)));
m.setSoftMassElement(mUr, 1, 1, signedSqr(inputParameters.display(44)));
m.setSoftMassElement(mUr, 2, 2, signedSqr(inputParameters.display(45)));
m.setSoftMassElement(mUr, 3, 3, signedSqr(inputParameters.display(46)));
m.setSoftMassElement(mDr, 1, 1, signedSqr(inputParameters.display(47)));
m.setSoftMassElement(mDr, 2, 2, signedSqr(inputParameters.display(48)));
m.setSoftMassElement(mDr, 3, 3, signedSqr(inputParameters.display(49)));
if (!m.displayAltEwsb()) {
m.setMh1Squared(inputParameters.display(21));
m.setMh2Squared(inputParameters.display(22));
}
}
/// universal mSUGRA boundary conditions
void sugraBcs(MssmSoftsusy & m, const DoubleVector & inputParameters) {
double m0 = inputParameters.display(1);
double m12 = inputParameters.display(2);
double a0 = inputParameters.display(3);
/// Sets scalar soft masses equal to m0, fermion ones to m12 and sets the
/// trilinear scalar coupling to be a0
/// if (m0 < 0.0) m.flagTachyon(true); Deleted on request from A Pukhov
m.standardSugra(m0, m12, a0);
return;
}
void nuhmI(MssmSoftsusy & m, const DoubleVector & inputParameters) {
double m0 = inputParameters.display(1);
double m12 = inputParameters.display(2);
double mH = inputParameters.display(3);
double a0 = inputParameters.display(4);
/// Sets scalar soft masses equal to m0, fermion ones to m12 and sets the
/// trilinear scalar coupling to be a0
/// if (m0 < 0.0) m.flagTachyon(true); Deleted on request from A Pukhov
m.standardSugra(m0, m12, a0);
m.setMh1Squared(mH * mH); m.setMh2Squared(mH * mH);
return;
}
void nuhmII(MssmSoftsusy & m, const DoubleVector & inputParameters) {
double m0 = inputParameters.display(1);
double m12 = inputParameters.display(2);
double mH1 = inputParameters.display(3);
double mH2 = inputParameters.display(4);
double a0 = inputParameters.display(5);
/// Sets scalar soft masses equal to m0, fermion ones to m12 and sets the
/// trilinear scalar coupling to be a0
/// if (m0 < 0.0) m.flagTachyon(true); Deleted on request from A Pukhov
m.standardSugra(m0, m12, a0);
m.setMh1Squared(mH1 * mH1); m.setMh2Squared(mH2 * mH2);
return;
}
/// Other types of boundary condition
void amsbBcs(MssmSoftsusy & m, const DoubleVector & inputParameters) {
double m32 = inputParameters.display(1);
double m0 = inputParameters.display(2);
m.standardSugra(m0, 0., 0.);
m.addAmsb(m32);
return;
}
void lvsBcs(MssmSoftsusy & m, const DoubleVector & inputParameters) {
double m0 = inputParameters.display(1);
double m12 = inputParameters.display(1) * sqrt(3.);
double a0 = -inputParameters.display(1) * sqrt(3.);
m.standardSugra(m0, m12, a0);
return;
}
void gmsbBcs(MssmSoftsusy & m, const DoubleVector & inputParameters) {
int n5 = int(inputParameters.display(1));
double mMess = inputParameters.display(2);
double lambda = inputParameters.display(3);
double cgrav = inputParameters.display(4);
m.minimalGmsb(n5, lambda, mMess, cgrav);
return;
}
void userDefinedBcs(MssmSoftsusy & m, const DoubleVector & inputParameters) {
m.methodBoundaryCondition(inputParameters);
sugraBcs(m, inputParameters);
}
void nonUniGauginos(MssmSoftsusy & m, const DoubleVector & inputParameters) {
double m0 = inputParameters.display(1);
double m12 = inputParameters.display(2);
double a0 = inputParameters.display(3);
/// Sets scalar soft masses equal to m0, fermion ones to m12 and sets the
/// trilinear scalar coupling to be a0
/// if (m0 < 0.0) m.flagTachyon(true); Deleted on request from A Pukhov
m.standardSugra(m0, m12, a0);
m.setGauginoMass(2, inputParameters.display(4));
m.setGauginoMass(3, inputParameters.display(5));
return;
}
// Boundary conditions of split gauge mediated SUSY breaking (see
// http://www.physics.rutgers.edu/~somalwar/conlsp/slepton-coNLSP.pdf
// for example). Note that here, mu is set at mMess instead of at the
// electroweak scale.
void splitGmsb(MssmSoftsusy & m, const DoubleVector & inputParameters) {
double n5 = inputParameters(1);
double lambdaL = inputParameters(2);
double lambdaD = inputParameters(3);
double mMess = inputParameters(4);
double muOm2 = inputParameters(5);
double mAOm2 = inputParameters(6);
double cgrav = inputParameters(7);
double lambda1 = n5 * (0.6 * lambdaL + 0.4 * lambdaD);
double lambda2 = n5 * lambdaL;
double lambda3 = n5 * lambdaD;
double m1, m2, m3;
m1 = sqr(m.displayGaugeCoupling(1)) / (16.0 * sqr(PI)) * lambda1;
m2 = sqr(m.displayGaugeCoupling(2)) / (16.0 * sqr(PI)) * lambda2;
m3 = sqr(m.displayGaugeCoupling(3)) / (16.0 * sqr(PI)) * lambda3;
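// i.e. the one-loop gauge-mediation gaugino masses M_i = g_i^2 / (16 pi^2) * lambda_i.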
m.setGauginoMass(1, m1);
m.setGauginoMass(2, m2);
m.setGauginoMass(3, m3);
m.setM32(2.37e-19 * sqrt((sqr(lambdaL) + sqr(lambdaD)) * 0.5) *
mMess * cgrav);
double g1f = sqr(sqr(m.displayGaugeCoupling(1)));
double g2f = sqr(sqr(m.displayGaugeCoupling(2)));
double g3f = sqr(sqr(m.displayGaugeCoupling(3)));
double lambdaP1sq = n5 * (0.6 * sqr(lambdaL) + 0.4 * sqr(lambdaD));
double lambdaP2sq = n5 * sqr(lambdaL);
double lambdaP3sq = n5 * sqr(lambdaD);
double mursq, mdrsq, mersq, mqlsq, mllsq;
mursq = 2.0 *
(4.0 / 3.0 * g3f * lambdaP3sq + 0.6 * 4.0 / 9.0 * g1f * lambdaP1sq)
/ sqr(16.0 * sqr(PI));
mdrsq = 2.0 *
(4.0 / 3.0 * g3f * lambdaP3sq + 0.6 * 1.0 / 9.0 * g1f * lambdaP1sq)
/ sqr(16.0 * sqr(PI));
mersq = 2.0 *
(0.6 * g1f * lambdaP1sq)
/ sqr(16.0 * sqr(PI));
mqlsq = 2.0 *
(4.0 / 3.0 * g3f * lambdaP3sq + 0.75 * g2f * lambdaP2sq +
0.6 * g1f / 36.0 * lambdaP1sq)
/ sqr(16.0 * sqr(PI));
mllsq = 2.0 *
(0.75 * g2f * lambdaP2sq + 0.6 * 0.25 * g1f * lambdaP1sq)
/ sqr(16.0 * sqr(PI));
// You need Higgs masses too!
DoubleMatrix id(3, 3);
id(1, 1) = 1.0; id(2, 2) = 1.0; id(3, 3) = 1.0;
m.setSoftMassMatrix(mQl, mqlsq * id);
m.setSoftMassMatrix(mUr, mursq * id);
m.setSoftMassMatrix(mDr, mdrsq * id);
m.setSoftMassMatrix(mLl, mllsq * id);
m.setSoftMassMatrix(mEr, mersq * id);
m.universalTrilinears(0.0);
DoubleVector pars(2); ///< encodes EWSB BC
pars(1) = muOm2 * m2;
pars(2) = mAOm2 * m2;
/// Save the two parameters
m.setEwsbConditions(pars);
}
/// Returns true if a point passes the Higgs constraint from LEP2, false
/// otherwise. Error is the amount of uncertainty on SOFTSUSY's mh prediction
bool testLEPHiggs(const MssmSoftsusy & r, double error) {
double Mh = r.displayPhys().mh0(1);
Mh = Mh + error;
double sinba2 = sqr(sin(atan(r.displayTanb()) - r.displayPhys().thetaH));
/// cout << "sinba2=" << sinba2 << endl;
if (Mh < 90.0) return false;
else if (90.0 <= Mh && Mh < 99.0) {
if (sinba2 < -6.1979 + 0.12313 * Mh - 0.00058411 * sqr(Mh)) return true;
else return false;
}
else if (99.0 <= Mh && Mh < 104.0) {
if (sinba2 < 35.73 - 0.69747 * Mh + 0.0034266 * sqr(Mh)) return true;
else return false;
}
else if (104.0 <= Mh && Mh < 109.5) {
if (sinba2 < 21.379 - 0.403 * Mh + 0.0019211 * sqr(Mh)) return true;
else return false;
}
else if (109.5 <= Mh && Mh < 114.4) {
if (sinba2 < 1/(60.081 - 0.51624 * Mh)) return true;
else return false;
}
return true;
}
/// from hep-ph/9507294 -- debugged 19/11/04
double ufb3fn(double mu, double htau, double h2, int family, const MssmSoftsusy
& temp) {
double vufb3 = 0.0;
/// potential value for these VEVs
if (fabs(h2) >
sqrt(sqr(mu) / (4.0 * sqr(htau)) +
4.0 * temp.displaySoftMassSquared(mLl, family, family) /
(0.6 * sqr(temp.displayGaugeCoupling(1)) +
sqr(temp.displayGaugeCoupling(2)))) - fabs(mu) /
temp.displayYukawaElement(YE, 3, 3) * 0.5)
vufb3 =
sqr(h2) * (temp.displayMh2Squared() +
temp.displaySoftMassSquared(mLl, family, family)) +
fabs(mu * h2) / htau *
(temp.displaySoftMassSquared(mLl, 3, 3) +
temp.displaySoftMassSquared(mEr, 3, 3)
+ temp.displaySoftMassSquared(mLl, family, family)) -
2.0 * sqr(temp.displaySoftMassSquared(mLl, family, family)) /
(0.6 * sqr(temp.displayGaugeCoupling(1)) +
sqr(temp.displayGaugeCoupling(2)));
else
vufb3 =
sqr(h2) * temp.displayMh2Squared() +
fabs(mu * h2) / htau *
(temp.displaySoftMassSquared(mLl, 3, 3) +
temp.displaySoftMassSquared(mEr, 3, 3)) +
1.0 / 8.0 * (0.6 * sqr(temp.displayGaugeCoupling(1)) +
sqr(temp.displayGaugeCoupling(2))) *
sqr(sqr(h2) + fabs(mu * h2) / htau);
if (PRINTOUT > 1) cout << vufb3 << endl;
return vufb3;
}
/// For ufb3direction, returns scale at which one-loop corrections are smallest
double getQhat(double inminTol,double eR, double h2, double Lisq, double mx,
MssmSoftsusy & temp) {
double oldQhat = -1.0e16;
int maxNum = 40;
int d; for (d = 1; d <= maxNum; d++) {
double qhat =
maximum(maximum(maximum(temp.displayGaugeCoupling(2) * eR,
temp.displayGaugeCoupling(2) * fabs(h2)),
temp.displayGaugeCoupling(2) * sqrt(fabs(Lisq))),
temp.displayYukawaElement(YU, 3, 3) * fabs(h2));
/// Run all parameters to that scale
if (qhat < mx) temp.runto(qhat);
else temp.runto(mx);
if (PRINTOUT > 1) cout << qhat << " ";
if (fabs((qhat - oldQhat) / qhat) < inminTol) return qhat;
oldQhat = qhat;
}
/// Return NOB if no convergence on qhat
return -numberOfTheBeast;
}
/// Difference between two SOFTSUSY objects in and out: EWSB terms only
double sumTol(const MssmSoftsusy & in, const MssmSoftsusy & out, int numTries) {
drBarPars inforLoops(in.displayDrBarPars()),
outforLoops(out.displayDrBarPars());
DoubleVector sT(34);
int k = 1;
double sTin = fabs(inforLoops.mh0(1)); double sTout = fabs(outforLoops.mh0(1));
sT(k) = fabs(1.0 - minimum(sTin, sTout) / maximum(sTin, sTout)); k++;
sTin = fabs(inforLoops.mA0(1)); sTout = fabs(outforLoops.mA0(1));
sT(k) = fabs(1.0 - minimum(sTin, sTout) / maximum(sTin, sTout)); k++;
sTin = fabs(inforLoops.mh0(2)); sTout = fabs(outforLoops.mh0(2));
sT(k) = fabs(1.0 - minimum(sTin, sTout) / maximum(sTin, sTout)); k++;
sTin = fabs(inforLoops.mHpm); sTout = fabs(outforLoops.mHpm);
sT(k) = fabs(1.0 - minimum(sTin, sTout) / maximum(sTin, sTout)); k++;
int i; for (i=1; i<=3; i++) {
sTin = fabs(inforLoops.msnu(i));
sTout = fabs(outforLoops.msnu(i));
sT(k) = fabs(1.0 - minimum(sTin, sTout) / maximum(sTin, sTout));
k++;
}
for (i=1; i<=2; i++) {
sTin = fabs(inforLoops.mch(i));
sTout = fabs(outforLoops.mch(i));
sT(k) = fabs(1.0 - minimum(sTin, sTout) / maximum(sTin, sTout));
k++;
}
for (i=1; i<=4; i++) {
sTin = fabs(inforLoops.mneut(i));
sTout = fabs(outforLoops.mneut(i));
sT(k) = fabs(1.0 - minimum(sTin, sTout) / maximum(sTin, sTout));
k++;
}
sTin = fabs(inforLoops.mGluino);
sTout = fabs(outforLoops.mGluino);
sT(k) = fabs(1.0 - minimum(sTin, sTout) / maximum(sTin, sTout));
k++;
int j; for (j=1; j<=3; j++)
for(i=1; i<=2; i++) {
sTin = fabs(inforLoops.mu(i, j));
sTout = fabs(outforLoops.mu(i, j));
sT(k) = fabs(1.0 - minimum(sTin, sTout) / maximum(sTin, sTout));
k++;
sTin = fabs(inforLoops.md(i, j));
sTout = fabs(outforLoops.md(i, j));
sT(k) = fabs(1.0 - minimum(sTin, sTout) / maximum(sTin, sTout));
k++;
sTin = fabs(inforLoops.me(i, j));
sTout = fabs(outforLoops.me(i, j));
sT(k) = fabs(1.0 - minimum(sTin, sTout) / maximum(sTin, sTout));
k++;
}
/// The predicted value of MZ^2 is an absolute measure of how close to a
/// true solution we are:
// double tbPred = 0.;
double predictedMzSq = in.displayPredMzSq();
/// We allow an extra factor of 10 for the precision in the predicted value
/// of MZ compared to TOLERANCE if the program is struggling and gone beyond
/// 10 tries - an extra 2 comes from MZ v MZ^2
if (!in.displayProblem().testSeriousProblem()) {
sT(k) = 0.5 *
fabs(1. - minimum(predictedMzSq, sqr(MZ)) /
maximum(sqr(MZ), predictedMzSq));
if (numTries > 10) sT(k) *= 0.1;
}
return sT.max();
}
/// Prints out what the lsp is
string recogLsp(int temp, int posj) {
string out;
switch(temp) {
case -1: out = "gravitino"; break;
case 0: out = "neutralino"; break;
case 1:
switch(posj) {
case 3: out = "stop"; break;
case 2: out = "scharm"; break;
case 1: out = "sup"; break;
} break;
case 2:
switch(posj) {
case 3: out = "sbottom"; break;
case 2: out = "sstange"; break;
case 1: out = "sdown"; break;
} break;<|fim▁hole|> case 2: out = "smu"; break;
case 1: out = "selectron"; break;
} break;
case 4: out = "chargino"; break;
case 5: out = "sneutrino"; break;
case 6: out = "gluino"; break;
default:
ostringstream ii;
ii << "Wrong input to lsp printing routine\n";
throw ii.str(); break;
}
return out;
}
ostream & operator <<(ostream &left, const MssmSoftsusy &s) {
left << HR << endl;
left << "Gravitino mass M3/2: " << s.displayGravitino() << endl;
left << "Msusy: " << s.displayMsusy() << " MW: " << s.displayMw()
<< " Predicted MZ: " << sqrt(s.displayPredMzSq()) << endl;
left << "Data set:\n" << s.displayDataSet();
left << HR << endl;
left << s.displaySoftPars();
left << "t1/v1(MS)=" << s.displayTadpole1Ms()
<< " t2/v2(MS)=" << s.displayTadpole2Ms() << endl;
left << HR << "\nPhysical MSSM parameters:\n";
left << s.displayPhys();
double mass; int posi, posj, id;
id = s.lsp(mass, posi, posj);
/// If the gravitino mass is non-zero, and if it is smaller than the visible
/// sector LSP mass, make it clear that the particle is the NLSP
left << "lsp is " << recogLsp(id, posj);
left << " of mass " << mass << " GeV\n";
if (s.displayProblem().test()) left << "***** PROBLEM *****" <<
s.displayProblem() << " *****" << endl;
left << HR << endl;
if (s.displaySetTbAtMX()) left << "Tan beta is set at user defined scale\n";
if (s.displayAltEwsb()) left << "Alternative EWSB conditions: mu="
<< s.displayMuCond()
<< " mA=" << s.displayMaCond() << endl;
return left;
}
} // namespace softsusy<|fim▁end|> | case 3:
switch(posj) {
case 3: out = "stau"; break; |
<|file_name|>snapscroll.py<|end_file_name|><|fim▁begin|>#############################################################################
##
## Copyright (C) 2009 Nokia Corporation and/or its subsidiary(-ies).
## Contact: Qt Software Information ([email protected])
##
## This file is part of the Graphics Dojo project on Qt Labs.
##
## This file may be used under the terms of the GNU General Public
## License version 2.0 or 3.0 as published by the Free Software Foundation
## and appearing in the file LICENSE.GPL included in the packaging of
## this file. Please review the following information to ensure GNU
## General Public Licensing requirements will be met:
## http:#www.fsf.org/licensing/licenses/info/GPLv2.html and
## http:#www.gnu.org/copyleft/gpl.html.
##
## If you are unsure which license is appropriate for your use, please
## contact the sales department at [email protected].
##
## This file is provided AS IS with NO WARRANTY OF ANY KIND, INCLUDING THE
## WARRANTY OF DESIGN, MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE.
##
#############################################################################
import sys
from PyQt4.QtCore import *
from PyQt4.QtGui import *
from PyQt4.QtWebKit import *
if QT_VERSION < 0x0040500:
sys.stderr.write("You need Qt 4.5 or newer to run this example.\n")
sys.exit(1)
SNAP_THRESHOLD = 10
class SnapView(QWebView):
def __init__(self):
QWebView.__init__(self)
self.snapEnabled = True
self.setWindowTitle(self.tr("Snap-scrolling is ON"))
# rects hit by the line, in main frame's view coordinate
def hitBoundingRects(self, line):
hitRects = []
points = 8
delta = QPoint(line.dx() / points, line.dy() / points)
point = line.p1()
i = 0
while i < points - 1:
point += delta
hit = self.page().mainFrame().hitTestContent(point)
if not hit.boundingRect().isEmpty():
hitRects.append(hit.boundingRect())<|fim▁hole|>
return hitRects
def keyPressEvent(self, event):
# toggle snapping
if event.key() == Qt.Key_F3:
self.snapEnabled = not self.snapEnabled
if self.snapEnabled:
self.setWindowTitle(self.tr("Snap-scrolling is ON"))
else:
self.setWindowTitle(self.tr("Snap-scrolling is OFF"))
event.accept()
return
# no snapping? do not bother...
if not self.snapEnabled:
            QWebView.keyPressEvent(self, event)
return
previousOffset = self.page().mainFrame().scrollPosition()
        QWebView.keyPressEvent(self, event)
if not event.isAccepted():
return
if event.key() == Qt.Key_Down:
ofs = self.page().mainFrame().scrollPosition()
jump = ofs.y() - previousOffset.y()
if jump == 0:
return
jump += SNAP_THRESHOLD
rects = self.hitBoundingRects(QLine(1, 1, self.width() - 1, 1))
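            # snap to the smallest element-top offset that exceeds the
            # threshold but still lies within the default jump distance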
i = 0
while i < len(rects):
j = rects[i].top() - previousOffset.y()
if j > SNAP_THRESHOLD and j < jump:
jump = j
i += 1
self.page().mainFrame().setScrollPosition(previousOffset + QPoint(0, jump))
if __name__ == "__main__":
app = QApplication(sys.argv)
view = SnapView()
view.load(QUrl("http://news.bbc.co.uk/text_only.stm"))
view.resize(320, 500)
view.show()
QMessageBox.information(view, "Hint", "Use F3 to toggle snapping on and off")
sys.exit(app.exec_())<|fim▁end|> | i += 1 |
<|file_name|>mac.py<|end_file_name|><|fim▁begin|>#Copyright 2013 Paul Barton
#
#This program is free software: you can redistribute it and/or modify
#it under the terms of the GNU General Public License as published by
#the Free Software Foundation, either version 3 of the License, or
#(at your option) any later version.
#
#This program is distributed in the hope that it will be useful,
#but WITHOUT ANY WARRANTY; without even the implied warranty of
#MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
#GNU General Public License for more details.
#
#You should have received a copy of the GNU General Public License
#along with this program. If not, see <http://www.gnu.org/licenses/>.
import Quartz
from AppKit import NSEvent, NSScreen
from .base import PyMouseMeta, PyMouseEventMeta
pressID = [None, Quartz.kCGEventLeftMouseDown,
Quartz.kCGEventRightMouseDown, Quartz.kCGEventOtherMouseDown]
releaseID = [None, Quartz.kCGEventLeftMouseUp,
Quartz.kCGEventRightMouseUp, Quartz.kCGEventOtherMouseUp]
class PyMouse(PyMouseMeta):
def press(self, x, y, button=1):
event = Quartz.CGEventCreateMouseEvent(None,
pressID[button],
(x, y),
button - 1)
Quartz.CGEventPost(Quartz.kCGHIDEventTap, event)
def release(self, x, y, button=1):
event = Quartz.CGEventCreateMouseEvent(None,
releaseID[button],
(x, y),
button - 1)
Quartz.CGEventPost(Quartz.kCGHIDEventTap, event)
def move(self, x, y):
move = Quartz.CGEventCreateMouseEvent(None, Quartz.kCGEventMouseMoved, (x, y), 0)
Quartz.CGEventPost(Quartz.kCGHIDEventTap, move)
def drag(self, x, y):
drag = Quartz.CGEventCreateMouseEvent(None, Quartz.kCGEventLeftMouseDragged, (x, y), 0)
Quartz.CGEventPost(Quartz.kCGHIDEventTap, drag)
def position(self):
loc = NSEvent.mouseLocation()
return loc.x, Quartz.CGDisplayPixelsHigh(0) - loc.y
def screen_size(self):
return NSScreen.mainScreen().frame().size.width, NSScreen.mainScreen().frame().size.height
def scroll(self, vertical=None, horizontal=None, depth=None):
#Local submethod for generating Mac scroll events in one axis at a time
def scroll_event(y_move=0, x_move=0, z_move=0, n=1):
for _ in range(abs(n)):
scrollWheelEvent = Quartz.CGEventCreateScrollWheelEvent(<|fim▁hole|> Quartz.kCGScrollEventUnitLine, # Unit of measurement is lines
3, # Number of wheels(dimensions)
y_move,
x_move,
z_move)
Quartz.CGEventPost(Quartz.kCGHIDEventTap, scrollWheelEvent)
#Execute vertical then horizontal then depth scrolling events
if vertical is not None:
vertical = int(vertical)
if vertical == 0: # Do nothing with 0 distance
pass
elif vertical > 0: # Scroll up if positive
scroll_event(y_move=1, n=vertical)
else: # Scroll down if negative
scroll_event(y_move=-1, n=abs(vertical))
if horizontal is not None:
horizontal = int(horizontal)
if horizontal == 0: # Do nothing with 0 distance
pass
elif horizontal > 0: # Scroll right if positive
scroll_event(x_move=1, n=horizontal)
else: # Scroll left if negative
scroll_event(x_move=-1, n=abs(horizontal))
if depth is not None:
depth = int(depth)
if depth == 0: # Do nothing with 0 distance
pass
elif vertical > 0: # Scroll "out" if positive
scroll_event(z_move=1, n=depth)
else: # Scroll "in" if negative
scroll_event(z_move=-1, n=abs(depth))
class PyMouseEvent(PyMouseEventMeta):
def run(self):
tap = Quartz.CGEventTapCreate(
Quartz.kCGSessionEventTap,
Quartz.kCGHeadInsertEventTap,
Quartz.kCGEventTapOptionDefault,
Quartz.CGEventMaskBit(Quartz.kCGEventMouseMoved) |
Quartz.CGEventMaskBit(Quartz.kCGEventLeftMouseDown) |
Quartz.CGEventMaskBit(Quartz.kCGEventLeftMouseUp) |
Quartz.CGEventMaskBit(Quartz.kCGEventRightMouseDown) |
Quartz.CGEventMaskBit(Quartz.kCGEventRightMouseUp) |
Quartz.CGEventMaskBit(Quartz.kCGEventOtherMouseDown) |
Quartz.CGEventMaskBit(Quartz.kCGEventOtherMouseUp),
self.handler,
None)
loopsource = Quartz.CFMachPortCreateRunLoopSource(None, tap, 0)
loop = Quartz.CFRunLoopGetCurrent()
Quartz.CFRunLoopAddSource(loop, loopsource, Quartz.kCFRunLoopDefaultMode)
Quartz.CGEventTapEnable(tap, True)
while self.state:
Quartz.CFRunLoopRunInMode(Quartz.kCFRunLoopDefaultMode, 5, False)
def handler(self, proxy, type, event, refcon):
(x, y) = Quartz.CGEventGetLocation(event)
if type in pressID:
self.click(x, y, pressID.index(type), True)
elif type in releaseID:
self.click(x, y, releaseID.index(type), False)
else:
self.move(x, y)
if self.capture:
Quartz.CGEventSetType(event, Quartz.kCGEventNull)
return event<|fim▁end|> | None, # No source |
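For orientation, typical use of the PyMouse class above looks like the following sketch (illustrative; assumes PyMouse has been imported from this module):

mouse = PyMouse()
x, y = mouse.position()      # current cursor location
mouse.move(x + 10, y)        # nudge the cursor right
mouse.press(x + 10, y, 1)    # left button down...
mouse.release(x + 10, y, 1)  # ...and back up
mouse.scroll(vertical=-3)    # three wheel clicks downward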
<|file_name|>uploads.py<|end_file_name|><|fim▁begin|>from flask.ext import uploads
allowed = uploads.IMAGES<|fim▁hole|> allow=allowed,
deny=())
images = uploads.UploadSet('images', allowed)
images._config = _config<|fim▁end|> | _config = uploads.UploadConfiguration(
'./Application/static/uploads/',
base_url='/static/uploads/', |
<|file_name|>framed.rs<|end_file_name|><|fim▁begin|>// Licensed to the Apache Software Foundation (ASF) under one
// or more contributor license agreements. See the NOTICE file
// distributed with this work for additional information
// regarding copyright ownership. The ASF licenses this file
// to you under the Apache License, Version 2.0 (the
// "License"); you may not use this file except in compliance
// with the License. You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing,
// software distributed under the License is distributed on an
// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
// KIND, either express or implied. See the License for the
// specific language governing permissions and limitations
// under the License.
use byteorder::{BigEndian, ReadBytesExt, WriteBytesExt};
use std::cmp;
use std::io;<|fim▁hole|>use std::io::{ErrorKind, Read, Write};
use super::{TReadTransport, TReadTransportFactory, TWriteTransport, TWriteTransportFactory};
/// Default capacity of the read buffer in bytes.
const READ_CAPACITY: usize = 4096;
/// Default capacity of the write buffer in bytes.
const WRITE_CAPACITY: usize = 4096;
/// Transport that reads framed messages.
///
/// A `TFramedReadTransport` maintains a fixed-size internal read buffer.
/// On a call to `TFramedReadTransport::read(...)` one full message - both
/// fixed-length header and bytes - is read from the wrapped channel and
/// buffered. Subsequent read calls are serviced from the internal buffer
/// until it is exhausted, at which point the next full message is read
/// from the wrapped channel.
///
/// # Examples
///
/// Create and use a `TFramedReadTransport`.
///
/// ```no_run
/// use std::io::Read;
/// use thrift::transport::{TFramedReadTransport, TTcpChannel};
///
/// let mut c = TTcpChannel::new();
/// c.open("localhost:9090").unwrap();
///
/// let mut t = TFramedReadTransport::new(c);
///
/// t.read(&mut vec![0u8; 1]).unwrap();
/// ```
#[derive(Debug)]
pub struct TFramedReadTransport<C>
where
C: Read,
{
buf: Box<[u8]>,
pos: usize,
cap: usize,
chan: C,
}
impl<C> TFramedReadTransport<C>
where
C: Read,
{
/// Create a `TFramedTransport` with default-sized internal read and
/// write buffers that wraps the given `TIoChannel`.
pub fn new(channel: C) -> TFramedReadTransport<C> {
TFramedReadTransport::with_capacity(READ_CAPACITY, channel)
}
/// Create a `TFramedTransport` with an internal read buffer of size
/// `read_capacity` and an internal write buffer of size
/// `write_capacity` that wraps the given `TIoChannel`.
pub fn with_capacity(read_capacity: usize, channel: C) -> TFramedReadTransport<C> {
TFramedReadTransport {
buf: vec![0; read_capacity].into_boxed_slice(),
pos: 0,
cap: 0,
chan: channel,
}
}
}
impl<C> Read for TFramedReadTransport<C>
where
C: Read,
{
fn read(&mut self, b: &mut [u8]) -> io::Result<usize> {
if self.cap - self.pos == 0 {
let message_size = self.chan.read_i32::<BigEndian>()? as usize;
if message_size > self.buf.len() {
return Err(
io::Error::new(
ErrorKind::Other,
format!(
"bytes to be read ({}) exceeds buffer \
capacity ({})",
message_size,
self.buf.len()
),
),
);
}
self.chan.read_exact(&mut self.buf[..message_size])?;
self.pos = 0;
self.cap = message_size as usize;
}
let nread = cmp::min(b.len(), self.cap - self.pos);
b[..nread].clone_from_slice(&self.buf[self.pos..self.pos + nread]);
self.pos += nread;
Ok(nread)
}
}
/// Factory for creating instances of `TFramedReadTransport`.
#[derive(Default)]
pub struct TFramedReadTransportFactory;
impl TFramedReadTransportFactory {
pub fn new() -> TFramedReadTransportFactory {
TFramedReadTransportFactory {}
}
}
impl TReadTransportFactory for TFramedReadTransportFactory {
/// Create a `TFramedReadTransport`.
fn create(&self, channel: Box<Read + Send>) -> Box<TReadTransport + Send> {
Box::new(TFramedReadTransport::new(channel))
}
}
/// Transport that writes framed messages.
///
/// A `TFramedWriteTransport` maintains a fixed-size internal write buffer. All
/// writes are made to this buffer and are sent to the wrapped channel only
/// when `TFramedWriteTransport::flush()` is called. On a flush a fixed-length
/// header with a count of the buffered bytes is written, followed by the bytes
/// themselves.
///
/// # Examples
///
/// Create and use a `TFramedWriteTransport`.
///
/// ```no_run
/// use std::io::Write;
/// use thrift::transport::{TFramedWriteTransport, TTcpChannel};
///
/// let mut c = TTcpChannel::new();
/// c.open("localhost:9090").unwrap();
///
/// let mut t = TFramedWriteTransport::new(c);
///
/// t.write(&[0x00]).unwrap();
/// t.flush().unwrap();
/// ```
#[derive(Debug)]
pub struct TFramedWriteTransport<C>
where
C: Write,
{
buf: Box<[u8]>,
pos: usize,
channel: C,
}
impl<C> TFramedWriteTransport<C>
where
C: Write,
{
/// Create a `TFramedTransport` with default-sized internal read and
/// write buffers that wraps the given `TIoChannel`.
pub fn new(channel: C) -> TFramedWriteTransport<C> {
TFramedWriteTransport::with_capacity(WRITE_CAPACITY, channel)
}
/// Create a `TFramedTransport` with an internal read buffer of size
/// `read_capacity` and an internal write buffer of size
/// `write_capacity` that wraps the given `TIoChannel`.
pub fn with_capacity(write_capacity: usize, channel: C) -> TFramedWriteTransport<C> {
TFramedWriteTransport {
buf: vec![0; write_capacity].into_boxed_slice(),
pos: 0,
channel: channel,
}
}
}
impl<C> Write for TFramedWriteTransport<C>
where
C: Write,
{
fn write(&mut self, b: &[u8]) -> io::Result<usize> {
if b.len() > (self.buf.len() - self.pos) {
return Err(
io::Error::new(
ErrorKind::Other,
format!(
"bytes to be written ({}) exceeds buffer \
capacity ({})",
b.len(),
self.buf.len() - self.pos
),
),
);
}
let nwrite = b.len(); // always less than available write buffer capacity
self.buf[self.pos..(self.pos + nwrite)].clone_from_slice(b);
self.pos += nwrite;
Ok(nwrite)
}
fn flush(&mut self) -> io::Result<()> {
let message_size = self.pos;
if let 0 = message_size {
return Ok(());
} else {
self.channel
.write_i32::<BigEndian>(message_size as i32)?;
}
let mut byte_index = 0;
while byte_index < self.pos {
let nwrite = self.channel.write(&self.buf[byte_index..self.pos])?;
byte_index = cmp::min(byte_index + nwrite, self.pos);
}
self.pos = 0;
self.channel.flush()
}
}
/// Factory for creating instances of `TFramedWriteTransport`.
#[derive(Default)]
pub struct TFramedWriteTransportFactory;
impl TFramedWriteTransportFactory {
pub fn new() -> TFramedWriteTransportFactory {
TFramedWriteTransportFactory {}
}
}
impl TWriteTransportFactory for TFramedWriteTransportFactory {
/// Create a `TFramedWriteTransport`.
fn create(&self, channel: Box<Write + Send>) -> Box<TWriteTransport + Send> {
Box::new(TFramedWriteTransport::new(channel))
}
}
#[cfg(test)]
mod tests {
// use std::io::{Read, Write};
//
// use super::*;
// use ::transport::mem::TBufferChannel;
}<|fim▁end|> | |
<|file_name|>KickOutFlockHandler.java<|end_file_name|><|fim▁begin|>package com.sean.im.client.push.handler;
import com.alibaba.fastjson.JSON;
import com.sean.im.client.constant.Global;<|fim▁hole|>import com.sean.im.client.tray.TrayManager;
import com.sean.im.client.util.MusicUtil;
import com.sean.im.commom.core.Protocol;
import com.sean.im.commom.entity.Message;
/**
 * Kicks a member out of a flock (group chat)
* @author sean
*/
public class KickOutFlockHandler implements PushHandler
{
@Override
public void execute(Protocol notify)
{
Message msg = JSON.parseObject(notify.getParameter("msg"), Message.class);
        // Remove the flock from the UI
long flockId = Long.parseLong(msg.getContent());
MainForm.FORM.getFlockList().removeFlock(flockId);
        // Push the message onto the message queue
ApplicationContext.CTX.getMessageQueue().add(msg);
        // Flash the system tray as a notification
TrayManager.getInstance().startLight(0);
MusicUtil.play(Global.Root + "resource/sound/msg.wav");
}
}<|fim▁end|> | import com.sean.im.client.core.ApplicationContext;
import com.sean.im.client.core.PushHandler;
import com.sean.im.client.form.MainForm; |
<|file_name|>MW_Timer.cpp<|end_file_name|><|fim▁begin|>#include "MW_Timer.hpp"
<|fim▁hole|>{
reset();
}
bool MW_Timer::get() const
{
b_mutex.lock();
bool retVal = MPI::Wtime() > expirationTime;
b_mutex.unlock();
return retVal;
}
void MW_Timer::reset()
{
b_mutex.lock();
expirationTime = MPI::Wtime() + period;
b_mutex.unlock();
}<|fim▁end|> | MW_Timer::MW_Timer(double periodInSeconds): period(periodInSeconds) |
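MW_Timer is just a mutex-guarded deadline (expirationTime = now + period). The same pattern in a minimal Python sketch, with monotonic time standing in for MPI::Wtime (class and method names are illustrative):

import threading, time

class ExpiringTimer:
    def __init__(self, period_seconds):
        self._period = period_seconds
        self._lock = threading.Lock()
        self.reset()

    def expired(self):               # counterpart of MW_Timer::get()
        with self._lock:
            return time.monotonic() > self._deadline

    def reset(self):                 # counterpart of MW_Timer::reset()
        with self._lock:
            self._deadline = time.monotonic() + self._period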
<|file_name|>islamic.js<|end_file_name|><|fim▁begin|>define({ root:
//begin v1.x content
{
"months-format-narrow": [
"1",
"2",
"3",
"4",
"5",
"6",
"7",
"8",
"9",
"10",
"11",
"12"
],
"quarters-standAlone-narrow": [
"1",
"2",
"3",
"4"
],
"dateFormatItem-yQQQ": "y QQQ",
"dateFormatItem-yMEd": "EEE, y-M-d",
"dateFormatItem-MMMEd": "E MMM d",
"eraNarrow": [
"AH"
],
"dateTimeFormats-appendItem-Day-Of-Week": "{0} {1}",
"dateFormat-long": "y MMMM d",
"months-format-wide": [
"Muharram",
"Safar",
"Rabiʻ I",
"Rabiʻ II",
"Jumada I",
"Jumada II",
"Rajab",
"Shaʻban",
"Ramadan",
"Shawwal",
"Dhuʻl-Qiʻdah",
"Dhuʻl-Hijjah"
],
"dateTimeFormat-medium": "{1} {0}",
"dateFormatItem-EEEd": "d EEE",<|fim▁hole|> "dateTimeFormats-appendItem-Second": "{0} ({2}: {1})",
"dateFormatItem-yM": "y-M",
"months-standAlone-wide": [
"Muharram",
"Safar",
"Rabiʻ I",
"Rabiʻ II",
"Jumada I",
"Jumada II",
"Rajab",
"Shaʻban",
"Ramadan",
"Shawwal",
"Dhuʻl-Qiʻdah",
"Dhuʻl-Hijjah"
],
"timeFormat-short": "HH:mm",
"quarters-format-wide": [
"Q1",
"Q2",
"Q3",
"Q4"
],
"timeFormat-long": "HH:mm:ss z",
"dateFormatItem-yMMM": "y MMM",
"dateFormatItem-yQ": "y Q",
"dateTimeFormats-appendItem-Era": "{0} {1}",
"months-format-abbr": [
"Muh.",
"Saf.",
"Rab. I",
"Rab. II",
"Jum. I",
"Jum. II",
"Raj.",
"Sha.",
"Ram.",
"Shaw.",
"Dhuʻl-Q.",
"Dhuʻl-H."
],
"timeFormat-full": "HH:mm:ss zzzz",
"dateTimeFormats-appendItem-Week": "{0} ({2}: {1})",
"dateFormatItem-H": "HH",
"months-standAlone-abbr": [
"Muh.",
"Saf.",
"Rab. I",
"Rab. II",
"Jum. I",
"Jum. II",
"Raj.",
"Sha.",
"Ram.",
"Shaw.",
"Dhuʻl-Q.",
"Dhuʻl-H."
],
"quarters-format-abbr": [
"Q1",
"Q2",
"Q3",
"Q4"
],
"quarters-standAlone-wide": [
"Q1",
"Q2",
"Q3",
"Q4"
],
"dateFormatItem-M": "L",
"days-standAlone-wide": [
"1",
"2",
"3",
"4",
"5",
"6",
"7"
],
"timeFormat-medium": "HH:mm:ss",
"dateFormatItem-Hm": "HH:mm",
"quarters-standAlone-abbr": [
"Q1",
"Q2",
"Q3",
"Q4"
],
"eraAbbr": [
"AH"
],
"days-standAlone-abbr": [
"1",
"2",
"3",
"4",
"5",
"6",
"7"
],
"dateFormatItem-d": "d",
"dateFormatItem-ms": "mm:ss",
"quarters-format-narrow": [
"1",
"2",
"3",
"4"
],
"dateFormatItem-h": "h a",
"dateTimeFormat-long": "{1} {0}",
"dayPeriods-format-narrow-am": "AM",
"dateFormatItem-MMMd": "MMM d",
"dateFormatItem-MEd": "E, M-d",
"dateTimeFormat-full": "{1} {0}",
"days-format-wide": [
"1",
"2",
"3",
"4",
"5",
"6",
"7"
],
"dateTimeFormats-appendItem-Day": "{0} ({2}: {1})",
"dateFormatItem-y": "y",
"months-standAlone-narrow": [
"1",
"2",
"3",
"4",
"5",
"6",
"7",
"8",
"9",
"10",
"11",
"12"
],
"dateFormatItem-hm": "h:mm a",
"dateTimeFormats-appendItem-Year": "{0} {1}",
"dateTimeFormats-appendItem-Hour": "{0} ({2}: {1})",
"dayPeriods-format-abbr-pm": "PM",
"days-format-abbr": [
"1",
"2",
"3",
"4",
"5",
"6",
"7"
],
"eraNames": [
"AH"
],
"days-format-narrow": [
"1",
"2",
"3",
"4",
"5",
"6",
"7"
],
"days-standAlone-narrow": [
"1",
"2",
"3",
"4",
"5",
"6",
"7"
],
"dateFormatItem-MMM": "LLL",
"dateTimeFormats-appendItem-Quarter": "{0} ({2}: {1})",
"dayPeriods-format-wide-am": "AM",
"dateTimeFormats-appendItem-Month": "{0} ({2}: {1})",
"dateTimeFormats-appendItem-Minute": "{0} ({2}: {1})",
"dateFormat-short": "yyyy-MM-dd",
"dateFormatItem-yMMMEd": "EEE, y MMM d",
"dateTimeFormats-appendItem-Timezone": "{0} {1}",
"dateFormat-medium": "y MMM d",
"dayPeriods-format-narrow-pm": "PM",
"dateTimeFormat-short": "{1} {0}",
"dateFormatItem-Hms": "HH:mm:ss",
"dateFormatItem-hms": "h:mm:ss a"
}
//end v1.x content
,
"ar": true,
"da": true,
"de": true,
"en": true,
"en-gb": true,
"es": true,
"fi": true,
"fr": true,
"he": true,
"hu": true,
"it": true,
"nb": true,
"nl": true,
"pl": true,
"pt": true,
"pt-pt": true,
"ru": true,
"sv": true,
"th": true,
"tr": true,
"zh": true,
"zh-hant": true
});<|fim▁end|> | "dayPeriods-format-wide-pm": "PM",
"dateFormat-full": "EEEE, y MMMM dd",
"dateFormatItem-Md": "M-d",
"dayPeriods-format-abbr-am": "AM", |
<|file_name|>element.rs<|end_file_name|><|fim▁begin|>/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
//! Element nodes.
use devtools_traits::AttrInfo;
use dom::activation::Activatable;
use dom::attr::{Attr, AttrHelpersForLayout};
use dom::bindings::cell::DOMRefCell;
use dom::bindings::codegen::Bindings::AttrBinding::AttrMethods;
use dom::bindings::codegen::Bindings::DocumentBinding::DocumentMethods;
use dom::bindings::codegen::Bindings::ElementBinding;
use dom::bindings::codegen::Bindings::ElementBinding::ElementMethods;
use dom::bindings::codegen::Bindings::EventBinding::EventMethods;
use dom::bindings::codegen::Bindings::HTMLTemplateElementBinding::HTMLTemplateElementMethods;
use dom::bindings::codegen::Bindings::NodeBinding::NodeMethods;
use dom::bindings::codegen::Bindings::WindowBinding::{ScrollBehavior, ScrollToOptions};
use dom::bindings::codegen::Bindings::WindowBinding::WindowMethods;
use dom::bindings::codegen::UnionTypes::NodeOrString;
use dom::bindings::conversions::DerivedFrom;
use dom::bindings::error::{Error, ErrorResult, Fallible};
use dom::bindings::inheritance::{Castable, ElementTypeId, HTMLElementTypeId, NodeTypeId};
use dom::bindings::js::{JS, LayoutJS, MutNullableJS};
use dom::bindings::js::{Root, RootedReference};
use dom::bindings::refcounted::{Trusted, TrustedPromise};
use dom::bindings::reflector::DomObject;
use dom::bindings::str::DOMString;
use dom::bindings::xmlname::{namespace_from_domstring, validate_and_extract, xml_name_type};
use dom::bindings::xmlname::XMLName::InvalidXMLName;
use dom::characterdata::CharacterData;
use dom::create::create_element;
use dom::document::{Document, LayoutDocumentHelpers};
use dom::documentfragment::DocumentFragment;
use dom::domrect::DOMRect;
use dom::domtokenlist::DOMTokenList;
use dom::event::Event;
use dom::eventtarget::EventTarget;
use dom::htmlanchorelement::HTMLAnchorElement;
use dom::htmlbodyelement::{HTMLBodyElement, HTMLBodyElementLayoutHelpers};
use dom::htmlbuttonelement::HTMLButtonElement;
use dom::htmlcanvaselement::{HTMLCanvasElement, LayoutHTMLCanvasElementHelpers};
use dom::htmlcollection::HTMLCollection;
use dom::htmlelement::HTMLElement;
use dom::htmlfieldsetelement::HTMLFieldSetElement;
use dom::htmlfontelement::{HTMLFontElement, HTMLFontElementLayoutHelpers};
use dom::htmlformelement::FormControlElementHelpers;
use dom::htmlhrelement::{HTMLHRElement, HTMLHRLayoutHelpers};
use dom::htmliframeelement::{HTMLIFrameElement, HTMLIFrameElementLayoutMethods};
use dom::htmlimageelement::{HTMLImageElement, LayoutHTMLImageElementHelpers};
use dom::htmlinputelement::{HTMLInputElement, LayoutHTMLInputElementHelpers};
use dom::htmllabelelement::HTMLLabelElement;
use dom::htmllegendelement::HTMLLegendElement;
use dom::htmllinkelement::HTMLLinkElement;
use dom::htmlobjectelement::HTMLObjectElement;
use dom::htmloptgroupelement::HTMLOptGroupElement;
use dom::htmlselectelement::HTMLSelectElement;
use dom::htmlstyleelement::HTMLStyleElement;
use dom::htmltablecellelement::{HTMLTableCellElement, HTMLTableCellElementLayoutHelpers};
use dom::htmltableelement::{HTMLTableElement, HTMLTableElementLayoutHelpers};
use dom::htmltablerowelement::{HTMLTableRowElement, HTMLTableRowElementLayoutHelpers};
use dom::htmltablesectionelement::{HTMLTableSectionElement, HTMLTableSectionElementLayoutHelpers};
use dom::htmltemplateelement::HTMLTemplateElement;
use dom::htmltextareaelement::{HTMLTextAreaElement, LayoutHTMLTextAreaElementHelpers};
use dom::mutationobserver::{Mutation, MutationObserver};
use dom::namednodemap::NamedNodeMap;
use dom::node::{CLICK_IN_PROGRESS, ChildrenMutation, LayoutNodeHelpers, Node};
use dom::node::{NodeDamage, SEQUENTIALLY_FOCUSABLE, UnbindContext};
use dom::node::{document_from_node, window_from_node};
use dom::nodelist::NodeList;
use dom::promise::Promise;
use dom::servoparser::ServoParser;
use dom::text::Text;
use dom::validation::Validatable;
use dom::virtualmethods::{VirtualMethods, vtable_for};
use dom::window::ReflowReason;
use dom_struct::dom_struct;
use html5ever::{Prefix, LocalName, Namespace, QualName};
use html5ever::serialize;
use html5ever::serialize::SerializeOpts;
use html5ever::serialize::TraversalScope;
use html5ever::serialize::TraversalScope::{ChildrenOnly, IncludeNode};
use js::jsapi::{HandleValue, JSAutoCompartment};
use net_traits::request::CorsSettings;
use ref_filter_map::ref_filter_map;
use script_layout_interface::message::ReflowQueryType;
use script_thread::Runnable;
use selectors::attr::{AttrSelectorOperation, NamespaceConstraint, CaseSensitivity};
use selectors::matching::{ElementSelectorFlags, LocalMatchingContext, MatchingContext, MatchingMode};
use selectors::matching::{HAS_EDGE_CHILD_SELECTOR, HAS_SLOW_SELECTOR, HAS_SLOW_SELECTOR_LATER_SIBLINGS};
use selectors::matching::{RelevantLinkStatus, matches_selector_list};
use servo_atoms::Atom;
use std::ascii::AsciiExt;
use std::borrow::Cow;
use std::cell::{Cell, Ref};
use std::convert::TryFrom;
use std::default::Default;
use std::fmt;
use std::rc::Rc;
use style::CaseSensitivityExt;
use style::applicable_declarations::ApplicableDeclarationBlock;
use style::attr::{AttrValue, LengthOrPercentageOrAuto};
use style::context::{QuirksMode, ReflowGoal};
use style::element_state::*;
use style::invalidation::element::restyle_hints::RESTYLE_SELF;
use style::properties::{Importance, PropertyDeclaration, PropertyDeclarationBlock, parse_style_attribute};
use style::properties::longhands::{self, background_image, border_spacing, font_family, font_size, overflow_x};
use style::rule_tree::CascadeLevel;
use style::selector_parser::{NonTSPseudoClass, PseudoElement, RestyleDamage, SelectorImpl, SelectorParser};
use style::selector_parser::extended_filtering;
use style::shared_lock::{SharedRwLock, Locked};
use style::sink::Push;
use style::stylearc::Arc;
use style::thread_state;
use style::values::{CSSFloat, Either};
use style::values::specified;
use stylesheet_loader::StylesheetOwner;
// TODO: Update focus state when the top-level browsing context gains or loses system focus,
// and when the element enters or leaves a browsing context container.
// https://html.spec.whatwg.org/multipage/#selector-focus
#[dom_struct]
pub struct Element {
node: Node,
local_name: LocalName,
tag_name: TagName,
namespace: Namespace,
prefix: Option<Prefix>,
attrs: DOMRefCell<Vec<JS<Attr>>>,
id_attribute: DOMRefCell<Option<Atom>>,
#[ignore_heap_size_of = "Arc"]
style_attribute: DOMRefCell<Option<Arc<Locked<PropertyDeclarationBlock>>>>,
attr_list: MutNullableJS<NamedNodeMap>,
class_list: MutNullableJS<DOMTokenList>,
state: Cell<ElementState>,
/// These flags are set by the style system to indicate the that certain
/// operations may require restyling this element or its descendants. The
/// flags are not atomic, so the style system takes care of only set them
/// when it has exclusive access to the element.
#[ignore_heap_size_of = "bitflags defined in rust-selectors"]
selector_flags: Cell<ElementSelectorFlags>,
}
impl fmt::Debug for Element {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
write!(f, "<{}", self.local_name)?;
if let Some(ref id) = *self.id_attribute.borrow() {
write!(f, " id={}", id)?;
}
write!(f, ">")
}
}
impl fmt::Debug for Root<Element> {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
(**self).fmt(f)
}
}
#[derive(PartialEq, HeapSizeOf)]
pub enum ElementCreator {
ParserCreated(u64),
ScriptCreated,
}
impl ElementCreator {
pub fn is_parser_created(&self) -> bool {
match *self {
ElementCreator::ParserCreated(_) => true,
ElementCreator::ScriptCreated => false,
}
}
pub fn return_line_number(&self) -> u64 {
match *self {
ElementCreator::ParserCreated(l) => l,
ElementCreator::ScriptCreated => 1,
}
}
}
pub enum AdjacentPosition {
BeforeBegin,
AfterEnd,
AfterBegin,
BeforeEnd,
}
impl<'a> TryFrom<&'a str> for AdjacentPosition {
type Error = Error;
fn try_from(position: &'a str) -> Result<AdjacentPosition, Self::Error> {
match_ignore_ascii_case! { &*position,
"beforebegin" => Ok(AdjacentPosition::BeforeBegin),
"afterbegin" => Ok(AdjacentPosition::AfterBegin),
"beforeend" => Ok(AdjacentPosition::BeforeEnd),
"afterend" => Ok(AdjacentPosition::AfterEnd),
_ => Err(Error::Syntax)
}
}
}
//
// Element methods
//
impl Element {
pub fn create(name: QualName,
document: &Document, creator: ElementCreator)
-> Root<Element> {
create_element(name, document, creator)
}
pub fn new_inherited(local_name: LocalName,
namespace: Namespace, prefix: Option<Prefix>,
document: &Document) -> Element {
Element::new_inherited_with_state(ElementState::empty(), local_name,
namespace, prefix, document)
}
pub fn new_inherited_with_state(state: ElementState, local_name: LocalName,
namespace: Namespace, prefix: Option<Prefix>,
document: &Document)
-> Element {
Element {
node: Node::new_inherited(document),
local_name: local_name,
tag_name: TagName::new(),
namespace: namespace,
prefix: prefix,
attrs: DOMRefCell::new(vec![]),
id_attribute: DOMRefCell::new(None),
style_attribute: DOMRefCell::new(None),
attr_list: Default::default(),
class_list: Default::default(),
state: Cell::new(state),
selector_flags: Cell::new(ElementSelectorFlags::empty()),
}
}
pub fn new(local_name: LocalName,
namespace: Namespace,
prefix: Option<Prefix>,
document: &Document) -> Root<Element> {
Node::reflect_node(
box Element::new_inherited(local_name, namespace, prefix, document),
document,
ElementBinding::Wrap)
}
pub fn restyle(&self, damage: NodeDamage) {
let doc = self.node.owner_doc();
let mut restyle = doc.ensure_pending_restyle(self);
// FIXME(bholley): I think we should probably only do this for
// NodeStyleDamaged, but I'm preserving existing behavior.
restyle.hint.insert(RESTYLE_SELF);
if damage == NodeDamage::OtherNodeDamage {
restyle.damage = RestyleDamage::rebuild_and_reflow();
}
}
// https://drafts.csswg.org/cssom-view/#css-layout-box
// Elements that have a computed value of the display property
// that is table-column or table-column-group
// FIXME: Currently, it is assumed to be true always
fn has_css_layout_box(&self) -> bool {
true
}
// https://drafts.csswg.org/cssom-view/#potentially-scrollable
fn potentially_scrollable(&self) -> bool {
self.has_css_layout_box() &&
!self.overflow_x_is_visible() &&
!self.overflow_y_is_visible()
}
// used value of overflow-x is "visible"
fn overflow_x_is_visible(&self) -> bool {
let window = window_from_node(self);
let overflow_pair = window.overflow_query(self.upcast::<Node>().to_trusted_node_address());
overflow_pair.x == overflow_x::computed_value::T::visible
}
// used value of overflow-y is "visible"
fn overflow_y_is_visible(&self) -> bool {
let window = window_from_node(self);
let overflow_pair = window.overflow_query(self.upcast::<Node>().to_trusted_node_address());
overflow_pair.y == overflow_x::computed_value::T::visible
}
}
#[allow(unsafe_code)]
pub trait RawLayoutElementHelpers {
unsafe fn get_attr_for_layout<'a>(&'a self, namespace: &Namespace, name: &LocalName)
-> Option<&'a AttrValue>;
unsafe fn get_attr_val_for_layout<'a>(&'a self, namespace: &Namespace, name: &LocalName)
-> Option<&'a str>;
unsafe fn get_attr_vals_for_layout<'a>(&'a self, name: &LocalName) -> Vec<&'a AttrValue>;
}
#[inline]
#[allow(unsafe_code)]
pub unsafe fn get_attr_for_layout<'a>(elem: &'a Element, namespace: &Namespace, name: &LocalName)
-> Option<LayoutJS<Attr>> {
// cast to point to T in RefCell<T> directly
let attrs = elem.attrs.borrow_for_layout();
attrs.iter().find(|attr| {
let attr = attr.to_layout();
*name == attr.local_name_atom_forever() &&
(*attr.unsafe_get()).namespace() == namespace
}).map(|attr| attr.to_layout())
}
#[allow(unsafe_code)]
impl RawLayoutElementHelpers for Element {
#[inline]
unsafe fn get_attr_for_layout<'a>(&'a self, namespace: &Namespace, name: &LocalName)
-> Option<&'a AttrValue> {
get_attr_for_layout(self, namespace, name).map(|attr| {
attr.value_forever()
})
}
#[inline]
unsafe fn get_attr_val_for_layout<'a>(&'a self, namespace: &Namespace, name: &LocalName)
-> Option<&'a str> {
get_attr_for_layout(self, namespace, name).map(|attr| {
attr.value_ref_forever()
})
}
#[inline]
unsafe fn get_attr_vals_for_layout<'a>(&'a self, name: &LocalName) -> Vec<&'a AttrValue> {
let attrs = self.attrs.borrow_for_layout();
attrs.iter().filter_map(|attr| {
let attr = attr.to_layout();
if *name == attr.local_name_atom_forever() {
Some(attr.value_forever())
} else {
None
}
}).collect()
}
}
pub trait LayoutElementHelpers {
#[allow(unsafe_code)]
unsafe fn has_class_for_layout(&self, name: &Atom, case_sensitivity: CaseSensitivity) -> bool;
#[allow(unsafe_code)]
unsafe fn get_classes_for_layout(&self) -> Option<&'static [Atom]>;
#[allow(unsafe_code)]
unsafe fn synthesize_presentational_hints_for_legacy_attributes<V>(&self, &mut V)
where V: Push<ApplicableDeclarationBlock>;
#[allow(unsafe_code)]
unsafe fn get_colspan(self) -> u32;
#[allow(unsafe_code)]
unsafe fn get_rowspan(self) -> u32;
#[allow(unsafe_code)]
unsafe fn html_element_in_html_document_for_layout(&self) -> bool;
fn id_attribute(&self) -> *const Option<Atom>;
fn style_attribute(&self) -> *const Option<Arc<Locked<PropertyDeclarationBlock>>>;
fn local_name(&self) -> &LocalName;
fn namespace(&self) -> &Namespace;
fn get_lang_for_layout(&self) -> String;
fn get_checked_state_for_layout(&self) -> bool;
fn get_indeterminate_state_for_layout(&self) -> bool;
fn get_state_for_layout(&self) -> ElementState;
fn insert_selector_flags(&self, flags: ElementSelectorFlags);
fn has_selector_flags(&self, flags: ElementSelectorFlags) -> bool;
}
impl LayoutElementHelpers for LayoutJS<Element> {
#[allow(unsafe_code)]
#[inline]
unsafe fn has_class_for_layout(&self, name: &Atom, case_sensitivity: CaseSensitivity) -> bool {
get_attr_for_layout(&*self.unsafe_get(), &ns!(), &local_name!("class")).map_or(false, |attr| {
attr.value_tokens_forever().unwrap().iter().any(|atom| case_sensitivity.eq_atom(atom, name))
})
}
#[allow(unsafe_code)]
#[inline]
unsafe fn get_classes_for_layout(&self) -> Option<&'static [Atom]> {
get_attr_for_layout(&*self.unsafe_get(), &ns!(), &local_name!("class"))
.map(|attr| attr.value_tokens_forever().unwrap())
}
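    // Translates legacy presentational HTML attributes (bgcolor, background,
    // color, face, size, cellspacing, width, height, cols, rows, border, ...)
    // into equivalent CSS declarations, pushed at the PresHints cascade level
    // so author-level styles can still override them.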
#[allow(unsafe_code)]
unsafe fn synthesize_presentational_hints_for_legacy_attributes<V>(&self, hints: &mut V)
where V: Push<ApplicableDeclarationBlock>
{
// FIXME(emilio): Just a single PDB should be enough.
#[inline]
fn from_declaration(shared_lock: &SharedRwLock, declaration: PropertyDeclaration)
-> ApplicableDeclarationBlock {
ApplicableDeclarationBlock::from_declarations(
Arc::new(shared_lock.wrap(PropertyDeclarationBlock::with_one(
declaration, Importance::Normal
))),
CascadeLevel::PresHints)
}
let document = self.upcast::<Node>().owner_doc_for_layout();
let shared_lock = document.style_shared_lock();
let bgcolor = if let Some(this) = self.downcast::<HTMLBodyElement>() {
this.get_background_color()
} else if let Some(this) = self.downcast::<HTMLTableElement>() {
this.get_background_color()
} else if let Some(this) = self.downcast::<HTMLTableCellElement>() {
this.get_background_color()
} else if let Some(this) = self.downcast::<HTMLTableRowElement>() {
this.get_background_color()
} else if let Some(this) = self.downcast::<HTMLTableSectionElement>() {
this.get_background_color()
} else {
None
};
if let Some(color) = bgcolor {
hints.push(from_declaration(
shared_lock,
PropertyDeclaration::BackgroundColor(color.into())
));
}
let background = if let Some(this) = self.downcast::<HTMLBodyElement>() {
this.get_background()
} else {
None
};
if let Some(url) = background {
hints.push(from_declaration(
shared_lock,
PropertyDeclaration::BackgroundImage(
background_image::SpecifiedValue(vec![
Either::Second(specified::Image::for_cascade(url.into()))
]))));
}
let color = if let Some(this) = self.downcast::<HTMLFontElement>() {
this.get_color()
} else if let Some(this) = self.downcast::<HTMLBodyElement>() {
// https://html.spec.whatwg.org/multipage/#the-page:the-body-element-20
this.get_color()
} else if let Some(this) = self.downcast::<HTMLHRElement>() {
// https://html.spec.whatwg.org/multipage/#the-hr-element-2:presentational-hints-5
this.get_color()
} else {
None
};
if let Some(color) = color {
hints.push(from_declaration(
shared_lock,
PropertyDeclaration::Color(
longhands::color::SpecifiedValue(color.into())
)
));
}
let font_family = if let Some(this) = self.downcast::<HTMLFontElement>() {
this.get_face()
} else {
None
};
if let Some(font_family) = font_family {
hints.push(from_declaration(
shared_lock,
PropertyDeclaration::FontFamily(
font_family::SpecifiedValue::Values(vec![
font_family::computed_value::FontFamily::from_atom(
font_family)]))));
}
let font_size = self.downcast::<HTMLFontElement>().and_then(|this| this.get_size());
if let Some(font_size) = font_size {
hints.push(from_declaration(
shared_lock,
PropertyDeclaration::FontSize(
font_size::SpecifiedValue::from_html_size(font_size as u8)
)
))
}
let cellspacing = if let Some(this) = self.downcast::<HTMLTableElement>() {
this.get_cellspacing()
} else {
None
};
if let Some(cellspacing) = cellspacing {
let width_value = specified::Length::from_px(cellspacing as f32);
hints.push(from_declaration(
shared_lock,
PropertyDeclaration::BorderSpacing(
Box::new(border_spacing::SpecifiedValue {
horizontal: width_value,
vertical: None,
}))));
}
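        // Text-entry <input> elements derive their width from the `size`
        // attribute, measured in character advances (ServoCharacterWidth).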
let size = if let Some(this) = self.downcast::<HTMLInputElement>() {
// FIXME(pcwalton): More use of atoms, please!
match (*self.unsafe_get()).get_attr_val_for_layout(&ns!(), &local_name!("type")) {
// Not text entry widget
Some("hidden") | Some("date") | Some("month") | Some("week") |
Some("time") | Some("datetime-local") | Some("number") | Some("range") |
Some("color") | Some("checkbox") | Some("radio") | Some("file") |
Some("submit") | Some("image") | Some("reset") | Some("button") => {
None
},
// Others
_ => {
match this.size_for_layout() {
0 => None,
s => Some(s as i32),
}
},
}
} else {
None
};
if let Some(size) = size {
let value = specified::NoCalcLength::ServoCharacterWidth(specified::CharacterWidth(size));
hints.push(from_declaration(
shared_lock,
PropertyDeclaration::Width(
specified::LengthOrPercentageOrAuto::Length(value))));
}
let width = if let Some(this) = self.downcast::<HTMLIFrameElement>() {
this.get_width()
} else if let Some(this) = self.downcast::<HTMLImageElement>() {
this.get_width()
} else if let Some(this) = self.downcast::<HTMLTableElement>() {
this.get_width()
} else if let Some(this) = self.downcast::<HTMLTableCellElement>() {
this.get_width()
} else if let Some(this) = self.downcast::<HTMLHRElement>() {
// https://html.spec.whatwg.org/multipage/#the-hr-element-2:attr-hr-width
this.get_width()
} else if let Some(this) = self.downcast::<HTMLCanvasElement>() {
this.get_width()
} else {
LengthOrPercentageOrAuto::Auto
};
// FIXME(emilio): Use from_computed value here and below.
match width {
LengthOrPercentageOrAuto::Auto => {}
LengthOrPercentageOrAuto::Percentage(percentage) => {
let width_value =
specified::LengthOrPercentageOrAuto::Percentage(specified::Percentage(percentage));
hints.push(from_declaration(
shared_lock,
PropertyDeclaration::Width(width_value)));
}
LengthOrPercentageOrAuto::Length(length) => {
let width_value = specified::LengthOrPercentageOrAuto::Length(
specified::NoCalcLength::Absolute(specified::AbsoluteLength::Px(length.to_f32_px())));
hints.push(from_declaration(
shared_lock,
PropertyDeclaration::Width(width_value)));
}
}
let height = if let Some(this) = self.downcast::<HTMLIFrameElement>() {
this.get_height()
} else if let Some(this) = self.downcast::<HTMLImageElement>() {
this.get_height()
} else if let Some(this) = self.downcast::<HTMLCanvasElement>() {
this.get_height()
} else {
LengthOrPercentageOrAuto::Auto
};
match height {
LengthOrPercentageOrAuto::Auto => {}
LengthOrPercentageOrAuto::Percentage(percentage) => {
let height_value =
specified::LengthOrPercentageOrAuto::Percentage(specified::Percentage(percentage));
hints.push(from_declaration(
shared_lock,
PropertyDeclaration::Height(height_value)));
}
LengthOrPercentageOrAuto::Length(length) => {
let height_value = specified::LengthOrPercentageOrAuto::Length(
specified::NoCalcLength::Absolute(specified::AbsoluteLength::Px(length.to_f32_px())));
hints.push(from_declaration(
shared_lock,
PropertyDeclaration::Height(height_value)));
}
}
let cols = if let Some(this) = self.downcast::<HTMLTextAreaElement>() {
match this.get_cols() {
0 => None,
c => Some(c as i32),
}
} else {
None
};
if let Some(cols) = cols {
// TODO(mttr) ServoCharacterWidth uses the size math for <input type="text">, but
// the math for <textarea> is a little different since we need to take
// scrollbar size into consideration (but we don't have a scrollbar yet!)
//
// https://html.spec.whatwg.org/multipage/#textarea-effective-width
let value = specified::NoCalcLength::ServoCharacterWidth(specified::CharacterWidth(cols));
hints.push(from_declaration(
shared_lock,
PropertyDeclaration::Width(specified::LengthOrPercentageOrAuto::Length(value))));
}
let rows = if let Some(this) = self.downcast::<HTMLTextAreaElement>() {
match this.get_rows() {
0 => None,
r => Some(r as i32),
}
} else {
None
};
if let Some(rows) = rows {
// TODO(mttr) This should take scrollbar size into consideration.
//
// https://html.spec.whatwg.org/multipage/#textarea-effective-height
let value = specified::NoCalcLength::FontRelative(specified::FontRelativeLength::Em(rows as CSSFloat));
hints.push(from_declaration(
shared_lock,
PropertyDeclaration::Height(specified::LengthOrPercentageOrAuto::Length(value))));
}
let border = if let Some(this) = self.downcast::<HTMLTableElement>() {
this.get_border()
} else {
None
};
if let Some(border) = border {
let width_value = specified::BorderSideWidth::Length(specified::Length::from_px(border as f32));
hints.push(from_declaration(
shared_lock,
PropertyDeclaration::BorderTopWidth(width_value.clone())));
hints.push(from_declaration(
shared_lock,
PropertyDeclaration::BorderLeftWidth(width_value.clone())));
hints.push(from_declaration(
shared_lock,
PropertyDeclaration::BorderBottomWidth(width_value.clone())));
hints.push(from_declaration(
shared_lock,
PropertyDeclaration::BorderRightWidth(width_value)));
}
}
#[allow(unsafe_code)]
unsafe fn get_colspan(self) -> u32 {
if let Some(this) = self.downcast::<HTMLTableCellElement>() {
this.get_colspan().unwrap_or(1)
} else {
// Don't panic since `display` can cause this to be called on arbitrary
// elements.
1
}
}
#[allow(unsafe_code)]
unsafe fn get_rowspan(self) -> u32 {
if let Some(this) = self.downcast::<HTMLTableCellElement>() {
this.get_rowspan().unwrap_or(1)
} else {
// Don't panic since `display` can cause this to be called on arbitrary
// elements.
1
}
}
#[inline]
#[allow(unsafe_code)]
unsafe fn html_element_in_html_document_for_layout(&self) -> bool {
if (*self.unsafe_get()).namespace != ns!(html) {
return false;
}
self.upcast::<Node>().owner_doc_for_layout().is_html_document_for_layout()
}
#[allow(unsafe_code)]
fn id_attribute(&self) -> *const Option<Atom> {
unsafe {
(*self.unsafe_get()).id_attribute.borrow_for_layout()
}
}
#[allow(unsafe_code)]
fn style_attribute(&self) -> *const Option<Arc<Locked<PropertyDeclarationBlock>>> {
unsafe {
(*self.unsafe_get()).style_attribute.borrow_for_layout()
}
}
#[allow(unsafe_code)]
fn local_name(&self) -> &LocalName {
unsafe {
&(*self.unsafe_get()).local_name
}
}
#[allow(unsafe_code)]
fn namespace(&self) -> &Namespace {
unsafe {
&(*self.unsafe_get()).namespace
}
}
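    // Walks the inclusive ancestors, returning the value of the first
    // `xml:lang` attribute found, falling back to a plain `lang` attribute on
    // the same node, and finally to the empty string.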
#[allow(unsafe_code)]
fn get_lang_for_layout(&self) -> String {
unsafe {
let mut current_node = Some(self.upcast::<Node>());
while let Some(node) = current_node {
current_node = node.parent_node_ref();
match node.downcast::<Element>().map(|el| el.unsafe_get()) {
Some(elem) => {
if let Some(attr) = (*elem).get_attr_val_for_layout(&ns!(xml), &local_name!("lang")) {
return attr.to_owned();
}
if let Some(attr) = (*elem).get_attr_val_for_layout(&ns!(), &local_name!("lang")) {
return attr.to_owned();
}
}
None => continue
}
}
// TODO: Check meta tags for a pragma-set default language
// TODO: Check HTTP Content-Language header
String::new()
}
}
#[inline]
#[allow(unsafe_code)]
fn get_checked_state_for_layout(&self) -> bool {
// TODO option and menuitem can also have a checked state.
match self.downcast::<HTMLInputElement>() {
Some(input) => unsafe {
input.checked_state_for_layout()
},
None => false,
}
}
#[inline]
#[allow(unsafe_code)]
fn get_indeterminate_state_for_layout(&self) -> bool {
// TODO progress elements can also be matched with :indeterminate
match self.downcast::<HTMLInputElement>() {
Some(input) => unsafe {
input.indeterminate_state_for_layout()
},
None => false,
}
}
#[inline]
#[allow(unsafe_code)]
fn get_state_for_layout(&self) -> ElementState {
unsafe {
(*self.unsafe_get()).state.get()
}
}
#[inline]
#[allow(unsafe_code)]
fn insert_selector_flags(&self, flags: ElementSelectorFlags) {
debug_assert!(thread_state::get().is_layout());
unsafe {
let f = &(*self.unsafe_get()).selector_flags;
f.set(f.get() | flags);
}
}
#[inline]
#[allow(unsafe_code)]
fn has_selector_flags(&self, flags: ElementSelectorFlags) -> bool {
unsafe {
(*self.unsafe_get()).selector_flags.get().contains(flags)
}
}
}
impl Element {
pub fn html_element_in_html_document(&self) -> bool {
self.namespace == ns!(html) && self.upcast::<Node>().is_in_html_doc()
}
pub fn local_name(&self) -> &LocalName {
&self.local_name
}
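    // ASCII-lowercases the name for HTML elements in HTML documents, matching
    // the case-insensitive attribute handling the DOM spec requires there.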
pub fn parsed_name(&self, mut name: DOMString) -> LocalName {
if self.html_element_in_html_document() {
name.make_ascii_lowercase();
}
LocalName::from(name)
}
pub fn namespace(&self) -> &Namespace {
&self.namespace
}
pub fn prefix(&self) -> Option<&Prefix> {
self.prefix.as_ref()
}
pub fn attrs(&self) -> Ref<[JS<Attr>]> {
Ref::map(self.attrs.borrow(), |attrs| &**attrs)
}
// Element branch of https://dom.spec.whatwg.org/#locate-a-namespace
pub fn locate_namespace(&self, prefix: Option<DOMString>) -> Namespace {
let prefix = prefix.map(String::from).map(LocalName::from);
let inclusive_ancestor_elements =
self.upcast::<Node>()
.inclusive_ancestors()
.filter_map(Root::downcast::<Self>);
// Steps 3-4.
for element in inclusive_ancestor_elements {
// Step 1.
if element.namespace() != &ns!() && element.prefix().map(|p| &**p) == prefix.as_ref().map(|p| &**p) {
return element.namespace().clone();
}
// Step 2.
            // Check the current ancestor element's attributes, not the
            // original element's.
            let attr = ref_filter_map(element.attrs(), |attrs| {
attrs.iter().find(|attr| {
if attr.namespace() != &ns!(xmlns) {
return false;
}
match (attr.prefix(), prefix.as_ref()) {
(Some(&namespace_prefix!("xmlns")), Some(prefix)) => {
attr.local_name() == prefix
},
(None, None) => attr.local_name() == &local_name!("xmlns"),
_ => false,
}
})
});
if let Some(attr) = attr {
return (**attr.value()).into();
}
}
ns!()
}
pub fn style_attribute(&self) -> &DOMRefCell<Option<Arc<Locked<PropertyDeclarationBlock>>>> {
&self.style_attribute
}
pub fn summarize(&self) -> Vec<AttrInfo> {
self.attrs.borrow().iter()
.map(|attr| attr.summarize())
.collect()
}
pub fn is_void(&self) -> bool {
if self.namespace != ns!(html) {
return false
}
match self.local_name {
/* List of void elements from
https://html.spec.whatwg.org/multipage/#html-fragment-serialisation-algorithm */
local_name!("area") | local_name!("base") | local_name!("basefont") |
local_name!("bgsound") | local_name!("br") |
local_name!("col") | local_name!("embed") | local_name!("frame") |
local_name!("hr") | local_name!("img") |
local_name!("input") | local_name!("keygen") | local_name!("link") |<|fim▁hole|> local_name!("param") | local_name!("source") | local_name!("track") |
local_name!("wbr") => true,
_ => false
}
}
pub fn serialize(&self, traversal_scope: TraversalScope) -> Fallible<DOMString> {
let mut writer = vec![];
match serialize(&mut writer,
&self.upcast::<Node>(),
SerializeOpts {
traversal_scope: traversal_scope,
..Default::default()
}) {
// FIXME(ajeffrey): Directly convert UTF8 to DOMString
Ok(()) => Ok(DOMString::from(String::from_utf8(writer).unwrap())),
Err(_) => panic!("Cannot serialize element"),
}
}
pub fn root_element(&self) -> Root<Element> {
if self.node.is_in_doc() {
self.upcast::<Node>()
.owner_doc()
.GetDocumentElement()
.unwrap()
} else {
self.upcast::<Node>()
.inclusive_ancestors()
.filter_map(Root::downcast)
.last()
.expect("We know inclusive_ancestors will return `self` which is an element")
}
}
// https://dom.spec.whatwg.org/#locate-a-namespace-prefix
pub fn lookup_prefix(&self, namespace: Namespace) -> Option<DOMString> {
for node in self.upcast::<Node>().inclusive_ancestors() {
match node.downcast::<Element>() {
Some(element) => {
// Step 1.
if *element.namespace() == namespace {
if let Some(prefix) = element.GetPrefix() {
return Some(prefix);
}
}
// Step 2.
for attr in element.attrs.borrow().iter() {
if attr.prefix() == Some(&namespace_prefix!("xmlns")) &&
**attr.value() == *namespace {
return Some(attr.LocalName());
}
}
},
None => return None,
}
}
None
}
pub fn is_focusable_area(&self) -> bool {
if self.is_actually_disabled() {
return false;
}
// TODO: Check whether the element is being rendered (i.e. not hidden).
let node = self.upcast::<Node>();
if node.get_flag(SEQUENTIALLY_FOCUSABLE) {
return true;
}
// https://html.spec.whatwg.org/multipage/#specially-focusable
match node.type_id() {
NodeTypeId::Element(ElementTypeId::HTMLElement(HTMLElementTypeId::HTMLAnchorElement)) |
NodeTypeId::Element(ElementTypeId::HTMLElement(HTMLElementTypeId::HTMLInputElement)) |
NodeTypeId::Element(ElementTypeId::HTMLElement(HTMLElementTypeId::HTMLSelectElement)) |
NodeTypeId::Element(ElementTypeId::HTMLElement(HTMLElementTypeId::HTMLTextAreaElement)) => {
true
}
_ => false,
}
}
pub fn is_actually_disabled(&self) -> bool {
let node = self.upcast::<Node>();
match node.type_id() {
NodeTypeId::Element(ElementTypeId::HTMLElement(HTMLElementTypeId::HTMLButtonElement)) |
NodeTypeId::Element(ElementTypeId::HTMLElement(HTMLElementTypeId::HTMLInputElement)) |
NodeTypeId::Element(ElementTypeId::HTMLElement(HTMLElementTypeId::HTMLSelectElement)) |
NodeTypeId::Element(ElementTypeId::HTMLElement(HTMLElementTypeId::HTMLTextAreaElement)) |
NodeTypeId::Element(ElementTypeId::HTMLElement(HTMLElementTypeId::HTMLOptionElement)) => {
self.disabled_state()
}
// TODO:
// an optgroup element that has a disabled attribute
// a menuitem element that has a disabled attribute
// a fieldset element that is a disabled fieldset
_ => false,
}
}
pub fn push_new_attribute(&self,
local_name: LocalName,
value: AttrValue,
name: LocalName,
namespace: Namespace,
prefix: Option<Prefix>) {
let window = window_from_node(self);
let attr = Attr::new(&window,
local_name,
value,
name,
namespace,
prefix,
Some(self));
self.push_attribute(&attr);
}
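    // Appends an already-constructed Attr to this element, queueing a mutation
    // record and notifying the element's vtable for attributes in the null
    // namespace.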
pub fn push_attribute(&self, attr: &Attr) {
let name = attr.local_name().clone();
let namespace = attr.namespace().clone();
let old_value = DOMString::from(&**attr.value());
let mutation = Mutation::Attribute { name, namespace, old_value };
MutationObserver::queue_a_mutation_record(&self.node, mutation);
assert!(attr.GetOwnerElement().r() == Some(self));
self.will_mutate_attr(attr);
self.attrs.borrow_mut().push(JS::from_ref(attr));
if attr.namespace() == &ns!() {
vtable_for(self.upcast()).attribute_mutated(attr, AttributeMutation::Set(None));
}
}
pub fn get_attribute(&self, namespace: &Namespace, local_name: &LocalName) -> Option<Root<Attr>> {
self.attrs
.borrow()
.iter()
.find(|attr| attr.local_name() == local_name && attr.namespace() == namespace)
.map(|js| Root::from_ref(&**js))
}
// https://dom.spec.whatwg.org/#concept-element-attributes-get-by-name
pub fn get_attribute_by_name(&self, name: DOMString) -> Option<Root<Attr>> {
let name = &self.parsed_name(name);
self.attrs.borrow().iter().find(|a| a.name() == name).map(|js| Root::from_ref(&**js))
}
pub fn set_attribute_from_parser(&self,
qname: QualName,
value: DOMString,
prefix: Option<Prefix>) {
// Don't set if the attribute already exists, so we can handle add_attrs_if_missing
if self.attrs
.borrow()
.iter()
.any(|a| *a.local_name() == qname.local && *a.namespace() == qname.ns) {
return;
}
let name = match prefix {
None => qname.local.clone(),
Some(ref prefix) => {
let name = format!("{}:{}", &**prefix, &*qname.local);
LocalName::from(name)
},
};
let value = self.parse_attribute(&qname.ns, &qname.local, value);
self.push_new_attribute(qname.local, value, name, qname.ns, prefix);
}
pub fn set_attribute(&self, name: &LocalName, value: AttrValue) {
assert!(name == &name.to_ascii_lowercase());
assert!(!name.contains(":"));
self.set_first_matching_attribute(name.clone(),
value,
name.clone(),
ns!(),
None,
|attr| attr.local_name() == name);
}
// https://html.spec.whatwg.org/multipage/#attr-data-*
pub fn set_custom_attribute(&self, name: DOMString, value: DOMString) -> ErrorResult {
// Step 1.
if let InvalidXMLName = xml_name_type(&name) {
return Err(Error::InvalidCharacter);
}
// Steps 2-5.
let name = LocalName::from(name);
let value = self.parse_attribute(&ns!(), &name, value);
self.set_first_matching_attribute(name.clone(),
value,
name.clone(),
ns!(),
None,
|attr| {
*attr.name() == name && *attr.namespace() == ns!()
});
Ok(())
}
fn set_first_matching_attribute<F>(&self,
local_name: LocalName,
value: AttrValue,
name: LocalName,
namespace: Namespace,
prefix: Option<Prefix>,
find: F)
where F: Fn(&Attr) -> bool
{
let attr = self.attrs
.borrow()
.iter()
.find(|attr| find(&attr))
.map(|js| Root::from_ref(&**js));
if let Some(attr) = attr {
attr.set_value(value, self);
} else {
self.push_new_attribute(local_name, value, name, namespace, prefix);
};
}
pub fn parse_attribute(&self,
namespace: &Namespace,
local_name: &LocalName,
value: DOMString)
-> AttrValue {
if *namespace == ns!() {
vtable_for(self.upcast()).parse_plain_attribute(local_name, value)
} else {
AttrValue::String(value.into())
}
}
pub fn remove_attribute(&self, namespace: &Namespace, local_name: &LocalName) -> Option<Root<Attr>> {
self.remove_first_matching_attribute(|attr| {
attr.namespace() == namespace && attr.local_name() == local_name
})
}
pub fn remove_attribute_by_name(&self, name: &LocalName) -> Option<Root<Attr>> {
self.remove_first_matching_attribute(|attr| attr.name() == name)
}
fn remove_first_matching_attribute<F>(&self, find: F) -> Option<Root<Attr>>
where F: Fn(&Attr) -> bool {
let idx = self.attrs.borrow().iter().position(|attr| find(&attr));
idx.map(|idx| {
let attr = Root::from_ref(&*(*self.attrs.borrow())[idx]);
self.will_mutate_attr(&attr);
let name = attr.local_name().clone();
let namespace = attr.namespace().clone();
let old_value = DOMString::from(&**attr.value());
let mutation = Mutation::Attribute { name, namespace, old_value, };
MutationObserver::queue_a_mutation_record(&self.node, mutation);
self.attrs.borrow_mut().remove(idx);
attr.set_owner(None);
if attr.namespace() == &ns!() {
vtable_for(self.upcast()).attribute_mutated(&attr, AttributeMutation::Removed);
}
attr
})
}
pub fn has_class(&self, name: &Atom, case_sensitivity: CaseSensitivity) -> bool {
self.get_attribute(&ns!(), &local_name!("class")).map_or(false, |attr| {
attr.value().as_tokens().iter().any(|atom| case_sensitivity.eq_atom(name, atom))
})
}
pub fn set_atomic_attribute(&self, local_name: &LocalName, value: DOMString) {
assert!(*local_name == local_name.to_ascii_lowercase());
let value = AttrValue::from_atomic(value.into());
self.set_attribute(local_name, value);
}
pub fn has_attribute(&self, local_name: &LocalName) -> bool {
assert!(local_name.bytes().all(|b| b.to_ascii_lowercase() == b));
self.attrs
.borrow()
.iter()
.any(|attr| attr.local_name() == local_name && attr.namespace() == &ns!())
}
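    // Reflects a boolean content attribute: present (as the empty string) when
    // true, absent when false. Illustrative use, e.g. for a form control:
    //
    //     element.set_bool_attribute(&local_name!("disabled"), true);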
pub fn set_bool_attribute(&self, local_name: &LocalName, value: bool) {
if self.has_attribute(local_name) == value {
return;
}
if value {
self.set_string_attribute(local_name, DOMString::new());
} else {
self.remove_attribute(&ns!(), local_name);
}
}
pub fn get_url_attribute(&self, local_name: &LocalName) -> DOMString {
assert!(*local_name == local_name.to_ascii_lowercase());
if !self.has_attribute(local_name) {
return DOMString::new();
}
let url = self.get_string_attribute(local_name);
let doc = document_from_node(self);
let base = doc.base_url();
// https://html.spec.whatwg.org/multipage/#reflect
// XXXManishearth this doesn't handle `javascript:` urls properly
match base.join(&url) {
Ok(parsed) => DOMString::from(parsed.into_string()),
Err(_) => DOMString::from(""),
}
}
pub fn set_url_attribute(&self, local_name: &LocalName, value: DOMString) {
self.set_string_attribute(local_name, value);
}
pub fn get_string_attribute(&self, local_name: &LocalName) -> DOMString {
match self.get_attribute(&ns!(), local_name) {
Some(x) => x.Value(),
None => DOMString::new(),
}
}
pub fn set_string_attribute(&self, local_name: &LocalName, value: DOMString) {
assert!(*local_name == local_name.to_ascii_lowercase());
self.set_attribute(local_name, AttrValue::String(value.into()));
}
pub fn get_tokenlist_attribute(&self, local_name: &LocalName) -> Vec<Atom> {
self.get_attribute(&ns!(), local_name).map(|attr| {
attr.value()
.as_tokens()
.to_vec()
}).unwrap_or(vec!())
}
pub fn set_tokenlist_attribute(&self, local_name: &LocalName, value: DOMString) {
assert!(*local_name == local_name.to_ascii_lowercase());
self.set_attribute(local_name,
AttrValue::from_serialized_tokenlist(value.into()));
}
pub fn set_atomic_tokenlist_attribute(&self, local_name: &LocalName, tokens: Vec<Atom>) {
assert!(*local_name == local_name.to_ascii_lowercase());
self.set_attribute(local_name, AttrValue::from_atomic_tokens(tokens));
}
pub fn get_int_attribute(&self, local_name: &LocalName, default: i32) -> i32 {
// TODO: Is this assert necessary?
assert!(local_name.chars().all(|ch| {
!ch.is_ascii() || ch.to_ascii_lowercase() == ch
}));
let attribute = self.get_attribute(&ns!(), local_name);
match attribute {
Some(ref attribute) => {
match *attribute.value() {
AttrValue::Int(_, value) => value,
_ => panic!("Expected an AttrValue::Int: \
implement parse_plain_attribute"),
}
}
None => default,
}
}
pub fn set_int_attribute(&self, local_name: &LocalName, value: i32) {
assert!(*local_name == local_name.to_ascii_lowercase());
self.set_attribute(local_name, AttrValue::Int(value.to_string(), value));
}
pub fn get_uint_attribute(&self, local_name: &LocalName, default: u32) -> u32 {
assert!(local_name.chars().all(|ch| !ch.is_ascii() || ch.to_ascii_lowercase() == ch));
let attribute = self.get_attribute(&ns!(), local_name);
match attribute {
Some(ref attribute) => {
match *attribute.value() {
AttrValue::UInt(_, value) => value,
_ => panic!("Expected an AttrValue::UInt: implement parse_plain_attribute"),
}
}
None => default,
}
}
pub fn set_uint_attribute(&self, local_name: &LocalName, value: u32) {
assert!(*local_name == local_name.to_ascii_lowercase());
self.set_attribute(local_name, AttrValue::UInt(value.to_string(), value));
}
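    // Gives the owner document a chance to snapshot this element's current
    // state before `attr` is mutated, so incremental restyle can diff against
    // the old values.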
pub fn will_mutate_attr(&self, attr: &Attr) {
let node = self.upcast::<Node>();
node.owner_doc().element_attr_will_change(self, attr);
}
// https://dom.spec.whatwg.org/#insert-adjacent
pub fn insert_adjacent(&self, where_: AdjacentPosition, node: &Node)
-> Fallible<Option<Root<Node>>> {
let self_node = self.upcast::<Node>();
match where_ {
AdjacentPosition::BeforeBegin => {
if let Some(parent) = self_node.GetParentNode() {
Node::pre_insert(node, &parent, Some(self_node)).map(Some)
} else {
Ok(None)
}
}
AdjacentPosition::AfterBegin => {
Node::pre_insert(node, &self_node, self_node.GetFirstChild().r()).map(Some)
}
AdjacentPosition::BeforeEnd => {
Node::pre_insert(node, &self_node, None).map(Some)
}
AdjacentPosition::AfterEnd => {
if let Some(parent) = self_node.GetParentNode() {
Node::pre_insert(node, &parent, self_node.GetNextSibling().r()).map(Some)
} else {
Ok(None)
}
}
}
}
// https://drafts.csswg.org/cssom-view/#dom-element-scroll
pub fn scroll(&self, x_: f64, y_: f64, behavior: ScrollBehavior) {
// Step 1.2 or 2.3
let x = if x_.is_finite() { x_ } else { 0.0f64 };
let y = if y_.is_finite() { y_ } else { 0.0f64 };
let node = self.upcast::<Node>();
// Step 3
let doc = node.owner_doc();
// Step 4
if !doc.is_fully_active() {
return;
}
// Step 5
let win = match doc.GetDefaultView() {
None => return,
Some(win) => win,
};
// Step 7
if *self.root_element() == *self {
if doc.quirks_mode() != QuirksMode::Quirks {
win.scroll(x, y, behavior);
}
return;
}
// Step 9
if doc.GetBody().r() == self.downcast::<HTMLElement>() &&
doc.quirks_mode() == QuirksMode::Quirks &&
!self.potentially_scrollable() {
win.scroll(x, y, behavior);
return;
}
// Step 10 (TODO)
// Step 11
win.scroll_node(node.to_trusted_node_address(), x, y, behavior);
}
// https://w3c.github.io/DOM-Parsing/#parsing
pub fn parse_fragment(&self, markup: DOMString) -> Fallible<Root<DocumentFragment>> {
// Steps 1-2.
let context_document = document_from_node(self);
// TODO(#11995): XML case.
let new_children = ServoParser::parse_html_fragment(self, markup);
// Step 3.
let fragment = DocumentFragment::new(&context_document);
// Step 4.
for child in new_children {
fragment.upcast::<Node>().AppendChild(&child).unwrap();
}
// Step 5.
Ok(fragment)
}
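    // Picks the context element for fragment parsing: the supplied element
    // itself, except that an <html> element in an HTML document (or a missing
    // element) is replaced with a freshly created <body>.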
pub fn fragment_parsing_context(owner_doc: &Document, element: Option<&Self>) -> Root<Self> {
match element {
Some(elem) if elem.local_name() != &local_name!("html") || !elem.html_element_in_html_document() => {
Root::from_ref(elem)
},
_ => {
Root::upcast(HTMLBodyElement::new(local_name!("body"), None, owner_doc))
}
}
}
// https://fullscreen.spec.whatwg.org/#fullscreen-element-ready-check
pub fn fullscreen_element_ready_check(&self) -> bool {
if !self.is_connected() {
return false
}
let document = document_from_node(self);
document.get_allow_fullscreen()
}
// https://html.spec.whatwg.org/multipage/#home-subtree
pub fn is_in_same_home_subtree<T>(&self, other: &T) -> bool
where T: DerivedFrom<Element> + DomObject
{
let other = other.upcast::<Element>();
self.root_element() == other.root_element()
}
}
impl ElementMethods for Element {
// https://dom.spec.whatwg.org/#dom-element-namespaceuri
fn GetNamespaceURI(&self) -> Option<DOMString> {
Node::namespace_to_string(self.namespace.clone())
}
// https://dom.spec.whatwg.org/#dom-element-localname
fn LocalName(&self) -> DOMString {
// FIXME(ajeffrey): Convert directly from LocalName to DOMString
DOMString::from(&*self.local_name)
}
// https://dom.spec.whatwg.org/#dom-element-prefix
fn GetPrefix(&self) -> Option<DOMString> {
self.prefix.as_ref().map(|p| DOMString::from(&**p))
}
// https://dom.spec.whatwg.org/#dom-element-tagname
fn TagName(&self) -> DOMString {
let name = self.tag_name.or_init(|| {
let qualified_name = match self.prefix {
Some(ref prefix) => {
Cow::Owned(format!("{}:{}", &**prefix, &*self.local_name))
},
None => Cow::Borrowed(&*self.local_name)
};
if self.html_element_in_html_document() {
LocalName::from(qualified_name.to_ascii_uppercase())
} else {
LocalName::from(qualified_name)
}
});
DOMString::from(&*name)
}
// https://dom.spec.whatwg.org/#dom-element-id
fn Id(&self) -> DOMString {
self.get_string_attribute(&local_name!("id"))
}
// https://dom.spec.whatwg.org/#dom-element-id
fn SetId(&self, id: DOMString) {
self.set_atomic_attribute(&local_name!("id"), id);
}
// https://dom.spec.whatwg.org/#dom-element-classname
fn ClassName(&self) -> DOMString {
self.get_string_attribute(&local_name!("class"))
}
// https://dom.spec.whatwg.org/#dom-element-classname
fn SetClassName(&self, class: DOMString) {
self.set_tokenlist_attribute(&local_name!("class"), class);
}
// https://dom.spec.whatwg.org/#dom-element-classlist
fn ClassList(&self) -> Root<DOMTokenList> {
self.class_list.or_init(|| DOMTokenList::new(self, &local_name!("class")))
}
// https://dom.spec.whatwg.org/#dom-element-attributes
fn Attributes(&self) -> Root<NamedNodeMap> {
self.attr_list.or_init(|| NamedNodeMap::new(&window_from_node(self), self))
}
// https://dom.spec.whatwg.org/#dom-element-hasattributes
fn HasAttributes(&self) -> bool {
!self.attrs.borrow().is_empty()
}
// https://dom.spec.whatwg.org/#dom-element-getattributenames
fn GetAttributeNames(&self) -> Vec<DOMString> {
self.attrs.borrow().iter().map(|attr| attr.Name()).collect()
}
// https://dom.spec.whatwg.org/#dom-element-getattribute
fn GetAttribute(&self, name: DOMString) -> Option<DOMString> {
self.GetAttributeNode(name)
.map(|s| s.Value())
}
// https://dom.spec.whatwg.org/#dom-element-getattributens
fn GetAttributeNS(&self,
namespace: Option<DOMString>,
local_name: DOMString)
-> Option<DOMString> {
self.GetAttributeNodeNS(namespace, local_name)
.map(|attr| attr.Value())
}
// https://dom.spec.whatwg.org/#dom-element-getattributenode
fn GetAttributeNode(&self, name: DOMString) -> Option<Root<Attr>> {
self.get_attribute_by_name(name)
}
// https://dom.spec.whatwg.org/#dom-element-getattributenodens
fn GetAttributeNodeNS(&self,
namespace: Option<DOMString>,
local_name: DOMString)
-> Option<Root<Attr>> {
let namespace = &namespace_from_domstring(namespace);
self.get_attribute(namespace, &LocalName::from(local_name))
}
// https://dom.spec.whatwg.org/#dom-element-setattribute
fn SetAttribute(&self, name: DOMString, value: DOMString) -> ErrorResult {
// Step 1.
if xml_name_type(&name) == InvalidXMLName {
return Err(Error::InvalidCharacter);
}
// Step 2.
let name = self.parsed_name(name);
// Step 3-5.
let value = self.parse_attribute(&ns!(), &name, value);
self.set_first_matching_attribute(
name.clone(), value, name.clone(), ns!(), None,
|attr| *attr.name() == name);
Ok(())
}
// https://dom.spec.whatwg.org/#dom-element-setattributens
fn SetAttributeNS(&self,
namespace: Option<DOMString>,
qualified_name: DOMString,
value: DOMString) -> ErrorResult {
let (namespace, prefix, local_name) =
validate_and_extract(namespace, &qualified_name)?;
let qualified_name = LocalName::from(qualified_name);
let value = self.parse_attribute(&namespace, &local_name, value);
self.set_first_matching_attribute(
local_name.clone(), value, qualified_name, namespace.clone(), prefix,
|attr| *attr.local_name() == local_name && *attr.namespace() == namespace);
Ok(())
}
// https://dom.spec.whatwg.org/#dom-element-setattributenode
fn SetAttributeNode(&self, attr: &Attr) -> Fallible<Option<Root<Attr>>> {
// Step 1.
if let Some(owner) = attr.GetOwnerElement() {
if &*owner != self {
return Err(Error::InUseAttribute);
}
}
// Step 2.
let position = self.attrs.borrow().iter().position(|old_attr| {
attr.namespace() == old_attr.namespace() && attr.local_name() == old_attr.local_name()
});
if let Some(position) = position {
let old_attr = Root::from_ref(&*self.attrs.borrow()[position]);
// Step 3.
if &*old_attr == attr {
return Ok(Some(Root::from_ref(attr)));
}
// Step 4.
self.will_mutate_attr(attr);
attr.set_owner(Some(self));
self.attrs.borrow_mut()[position] = JS::from_ref(attr);
old_attr.set_owner(None);
if attr.namespace() == &ns!() {
vtable_for(self.upcast()).attribute_mutated(
&attr, AttributeMutation::Set(Some(&old_attr.value())));
}
// Step 6.
Ok(Some(old_attr))
} else {
// Step 5.
attr.set_owner(Some(self));
self.push_attribute(attr);
// Step 6.
Ok(None)
}
}
// https://dom.spec.whatwg.org/#dom-element-setattributenodens
fn SetAttributeNodeNS(&self, attr: &Attr) -> Fallible<Option<Root<Attr>>> {
self.SetAttributeNode(attr)
}
// https://dom.spec.whatwg.org/#dom-element-removeattribute
fn RemoveAttribute(&self, name: DOMString) {
let name = self.parsed_name(name);
self.remove_attribute_by_name(&name);
}
// https://dom.spec.whatwg.org/#dom-element-removeattributens
fn RemoveAttributeNS(&self, namespace: Option<DOMString>, local_name: DOMString) {
let namespace = namespace_from_domstring(namespace);
let local_name = LocalName::from(local_name);
self.remove_attribute(&namespace, &local_name);
}
// https://dom.spec.whatwg.org/#dom-element-removeattributenode
fn RemoveAttributeNode(&self, attr: &Attr) -> Fallible<Root<Attr>> {
self.remove_first_matching_attribute(|a| a == attr)
.ok_or(Error::NotFound)
}
// https://dom.spec.whatwg.org/#dom-element-hasattribute
fn HasAttribute(&self, name: DOMString) -> bool {
self.GetAttribute(name).is_some()
}
// https://dom.spec.whatwg.org/#dom-element-hasattributens
fn HasAttributeNS(&self, namespace: Option<DOMString>, local_name: DOMString) -> bool {
self.GetAttributeNS(namespace, local_name).is_some()
}
// https://dom.spec.whatwg.org/#dom-element-getelementsbytagname
fn GetElementsByTagName(&self, localname: DOMString) -> Root<HTMLCollection> {
let window = window_from_node(self);
HTMLCollection::by_qualified_name(&window, self.upcast(), LocalName::from(&*localname))
}
// https://dom.spec.whatwg.org/#dom-element-getelementsbytagnamens
fn GetElementsByTagNameNS(&self,
maybe_ns: Option<DOMString>,
localname: DOMString)
-> Root<HTMLCollection> {
let window = window_from_node(self);
HTMLCollection::by_tag_name_ns(&window, self.upcast(), localname, maybe_ns)
}
// https://dom.spec.whatwg.org/#dom-element-getelementsbyclassname
fn GetElementsByClassName(&self, classes: DOMString) -> Root<HTMLCollection> {
let window = window_from_node(self);
HTMLCollection::by_class_name(&window, self.upcast(), classes)
}
// https://drafts.csswg.org/cssom-view/#dom-element-getclientrects
fn GetClientRects(&self) -> Vec<Root<DOMRect>> {
let win = window_from_node(self);
let raw_rects = self.upcast::<Node>().content_boxes();
raw_rects.iter().map(|rect| {
DOMRect::new(win.upcast(),
rect.origin.x.to_f64_px(),
rect.origin.y.to_f64_px(),
rect.size.width.to_f64_px(),
rect.size.height.to_f64_px())
}).collect()
}
// https://drafts.csswg.org/cssom-view/#dom-element-getboundingclientrect
fn GetBoundingClientRect(&self) -> Root<DOMRect> {
let win = window_from_node(self);
let rect = self.upcast::<Node>().bounding_content_box_or_zero();
DOMRect::new(win.upcast(),
rect.origin.x.to_f64_px(),
rect.origin.y.to_f64_px(),
rect.size.width.to_f64_px(),
rect.size.height.to_f64_px())
}
// https://drafts.csswg.org/cssom-view/#dom-element-scroll
fn Scroll(&self, options: &ScrollToOptions) {
// Step 1
let left = options.left.unwrap_or(self.ScrollLeft());
let top = options.top.unwrap_or(self.ScrollTop());
self.scroll(left, top, options.parent.behavior);
}
// https://drafts.csswg.org/cssom-view/#dom-element-scroll
fn Scroll_(&self, x: f64, y: f64) {
self.scroll(x, y, ScrollBehavior::Auto);
}
// https://drafts.csswg.org/cssom-view/#dom-element-scrollto
fn ScrollTo(&self, options: &ScrollToOptions) {
self.Scroll(options);
}
// https://drafts.csswg.org/cssom-view/#dom-element-scrollto
fn ScrollTo_(&self, x: f64, y: f64) {
self.Scroll_(x, y);
}
// https://drafts.csswg.org/cssom-view/#dom-element-scrollby
fn ScrollBy(&self, options: &ScrollToOptions) {
// Step 2
let delta_left = options.left.unwrap_or(0.0f64);
let delta_top = options.top.unwrap_or(0.0f64);
let left = self.ScrollLeft();
let top = self.ScrollTop();
self.scroll(left + delta_left, top + delta_top,
options.parent.behavior);
}
// https://drafts.csswg.org/cssom-view/#dom-element-scrollby
fn ScrollBy_(&self, x: f64, y: f64) {
let left = self.ScrollLeft();
let top = self.ScrollTop();
self.scroll(left + x, top + y, ScrollBehavior::Auto);
}
// https://drafts.csswg.org/cssom-view/#dom-element-scrolltop
fn ScrollTop(&self) -> f64 {
let node = self.upcast::<Node>();
// Step 1
let doc = node.owner_doc();
// Step 2
if !doc.is_fully_active() {
return 0.0;
}
// Step 3
let win = match doc.GetDefaultView() {
None => return 0.0,
Some(win) => win,
};
// Step 5
if *self.root_element() == *self {
if doc.quirks_mode() == QuirksMode::Quirks {
return 0.0;
}
// Step 6
return win.ScrollY() as f64;
}
// Step 7
if doc.GetBody().r() == self.downcast::<HTMLElement>() &&
doc.quirks_mode() == QuirksMode::Quirks &&
!self.potentially_scrollable() {
return win.ScrollY() as f64;
}
// Step 8
if !self.has_css_layout_box() {
return 0.0;
}
// Step 9
let point = node.scroll_offset();
return point.y.abs() as f64;
}
// https://drafts.csswg.org/cssom-view/#dom-element-scrolltop
fn SetScrollTop(&self, y_: f64) {
let behavior = ScrollBehavior::Auto;
// Step 1, 2
let y = if y_.is_finite() { y_ } else { 0.0f64 };
let node = self.upcast::<Node>();
// Step 3
let doc = node.owner_doc();
// Step 4
if !doc.is_fully_active() {
return;
}
// Step 5
let win = match doc.GetDefaultView() {
None => return,
Some(win) => win,
};
// Step 7
if *self.root_element() == *self {
if doc.quirks_mode() != QuirksMode::Quirks {
win.scroll(win.ScrollX() as f64, y, behavior);
}
return;
}
// Step 9
if doc.GetBody().r() == self.downcast::<HTMLElement>() &&
doc.quirks_mode() == QuirksMode::Quirks &&
!self.potentially_scrollable() {
win.scroll(win.ScrollX() as f64, y, behavior);
return;
}
// Step 10 (TODO)
// Step 11
win.scroll_node(node.to_trusted_node_address(), self.ScrollLeft(), y, behavior);
}
// https://drafts.csswg.org/cssom-view/#dom-element-scrolltop
fn ScrollLeft(&self) -> f64 {
let node = self.upcast::<Node>();
// Step 1
let doc = node.owner_doc();
// Step 2
if !doc.is_fully_active() {
return 0.0;
}
// Step 3
let win = match doc.GetDefaultView() {
None => return 0.0,
Some(win) => win,
};
// Step 5
if *self.root_element() == *self {
if doc.quirks_mode() != QuirksMode::Quirks {
// Step 6
return win.ScrollX() as f64;
}
return 0.0;
}
// Step 7
if doc.GetBody().r() == self.downcast::<HTMLElement>() &&
doc.quirks_mode() == QuirksMode::Quirks &&
!self.potentially_scrollable() {
return win.ScrollX() as f64;
}
// Step 8
if !self.has_css_layout_box() {
return 0.0;
}
// Step 9
let point = node.scroll_offset();
return point.x.abs() as f64;
}
// https://drafts.csswg.org/cssom-view/#dom-element-scrollleft
fn SetScrollLeft(&self, x_: f64) {
let behavior = ScrollBehavior::Auto;
// Step 1, 2
let x = if x_.is_finite() { x_ } else { 0.0f64 };
let node = self.upcast::<Node>();
// Step 3
let doc = node.owner_doc();
// Step 4
if !doc.is_fully_active() {
return;
}
// Step 5
let win = match doc.GetDefaultView() {
None => return,
Some(win) => win,
};
// Step 7
if *self.root_element() == *self {
if doc.quirks_mode() == QuirksMode::Quirks {
return;
}
win.scroll(x, win.ScrollY() as f64, behavior);
return;
}
// Step 9
if doc.GetBody().r() == self.downcast::<HTMLElement>() &&
doc.quirks_mode() == QuirksMode::Quirks &&
!self.potentially_scrollable() {
win.scroll(x, win.ScrollY() as f64, behavior);
return;
}
// Step 10 (TODO)
// Step 11
win.scroll_node(node.to_trusted_node_address(), x, self.ScrollTop(), behavior);
}
// https://drafts.csswg.org/cssom-view/#dom-element-scrollwidth
fn ScrollWidth(&self) -> i32 {
self.upcast::<Node>().scroll_area().size.width
}
// https://drafts.csswg.org/cssom-view/#dom-element-scrollheight
fn ScrollHeight(&self) -> i32 {
self.upcast::<Node>().scroll_area().size.height
}
// https://drafts.csswg.org/cssom-view/#dom-element-clienttop
fn ClientTop(&self) -> i32 {
self.upcast::<Node>().client_rect().origin.y
}
// https://drafts.csswg.org/cssom-view/#dom-element-clientleft
fn ClientLeft(&self) -> i32 {
self.upcast::<Node>().client_rect().origin.x
}
// https://drafts.csswg.org/cssom-view/#dom-element-clientwidth
fn ClientWidth(&self) -> i32 {
self.upcast::<Node>().client_rect().size.width
}
// https://drafts.csswg.org/cssom-view/#dom-element-clientheight
fn ClientHeight(&self) -> i32 {
self.upcast::<Node>().client_rect().size.height
}
/// https://w3c.github.io/DOM-Parsing/#widl-Element-innerHTML
fn GetInnerHTML(&self) -> Fallible<DOMString> {
// XXX TODO: XML case
self.serialize(ChildrenOnly)
}
/// https://w3c.github.io/DOM-Parsing/#widl-Element-innerHTML
fn SetInnerHTML(&self, value: DOMString) -> ErrorResult {
// Step 1.
let frag = self.parse_fragment(value)?;
// Step 2.
// https://github.com/w3c/DOM-Parsing/issues/1
let target = if let Some(template) = self.downcast::<HTMLTemplateElement>() {
Root::upcast(template.Content())
} else {
Root::from_ref(self.upcast())
};
Node::replace_all(Some(frag.upcast()), &target);
Ok(())
}
// https://dvcs.w3.org/hg/innerhtml/raw-file/tip/index.html#widl-Element-outerHTML
fn GetOuterHTML(&self) -> Fallible<DOMString> {
self.serialize(IncludeNode)
}
// https://w3c.github.io/DOM-Parsing/#dom-element-outerhtml
fn SetOuterHTML(&self, value: DOMString) -> ErrorResult {
let context_document = document_from_node(self);
let context_node = self.upcast::<Node>();
// Step 1.
let context_parent = match context_node.GetParentNode() {
None => {
// Step 2.
return Ok(());
},
Some(parent) => parent,
};
let parent = match context_parent.type_id() {
// Step 3.
NodeTypeId::Document(_) => return Err(Error::NoModificationAllowed),
// Step 4.
NodeTypeId::DocumentFragment => {
let body_elem = Element::create(QualName::new(None, ns!(html), local_name!("body")),
&context_document,
ElementCreator::ScriptCreated);
Root::upcast(body_elem)
},
_ => context_node.GetParentElement().unwrap()
};
// Step 5.
let frag = parent.parse_fragment(value)?;
// Step 6.
context_parent.ReplaceChild(frag.upcast(), context_node)?;
Ok(())
}
// https://dom.spec.whatwg.org/#dom-nondocumenttypechildnode-previouselementsibling
fn GetPreviousElementSibling(&self) -> Option<Root<Element>> {
self.upcast::<Node>().preceding_siblings().filter_map(Root::downcast).next()
}
// https://dom.spec.whatwg.org/#dom-nondocumenttypechildnode-nextelementsibling
fn GetNextElementSibling(&self) -> Option<Root<Element>> {
self.upcast::<Node>().following_siblings().filter_map(Root::downcast).next()
}
// https://dom.spec.whatwg.org/#dom-parentnode-children
fn Children(&self) -> Root<HTMLCollection> {
let window = window_from_node(self);
HTMLCollection::children(&window, self.upcast())
}
// https://dom.spec.whatwg.org/#dom-parentnode-firstelementchild
fn GetFirstElementChild(&self) -> Option<Root<Element>> {
self.upcast::<Node>().child_elements().next()
}
// https://dom.spec.whatwg.org/#dom-parentnode-lastelementchild
fn GetLastElementChild(&self) -> Option<Root<Element>> {
self.upcast::<Node>().rev_children().filter_map(Root::downcast::<Element>).next()
}
// https://dom.spec.whatwg.org/#dom-parentnode-childelementcount
fn ChildElementCount(&self) -> u32 {
self.upcast::<Node>().child_elements().count() as u32
}
// https://dom.spec.whatwg.org/#dom-parentnode-prepend
fn Prepend(&self, nodes: Vec<NodeOrString>) -> ErrorResult {
self.upcast::<Node>().prepend(nodes)
}
// https://dom.spec.whatwg.org/#dom-parentnode-append
fn Append(&self, nodes: Vec<NodeOrString>) -> ErrorResult {
self.upcast::<Node>().append(nodes)
}
// https://dom.spec.whatwg.org/#dom-parentnode-queryselector
fn QuerySelector(&self, selectors: DOMString) -> Fallible<Option<Root<Element>>> {
let root = self.upcast::<Node>();
root.query_selector(selectors)
}
// https://dom.spec.whatwg.org/#dom-parentnode-queryselectorall
fn QuerySelectorAll(&self, selectors: DOMString) -> Fallible<Root<NodeList>> {
let root = self.upcast::<Node>();
root.query_selector_all(selectors)
}
// https://dom.spec.whatwg.org/#dom-childnode-before
fn Before(&self, nodes: Vec<NodeOrString>) -> ErrorResult {
self.upcast::<Node>().before(nodes)
}
// https://dom.spec.whatwg.org/#dom-childnode-after
fn After(&self, nodes: Vec<NodeOrString>) -> ErrorResult {
self.upcast::<Node>().after(nodes)
}
// https://dom.spec.whatwg.org/#dom-childnode-replacewith
fn ReplaceWith(&self, nodes: Vec<NodeOrString>) -> ErrorResult {
self.upcast::<Node>().replace_with(nodes)
}
// https://dom.spec.whatwg.org/#dom-childnode-remove
fn Remove(&self) {
self.upcast::<Node>().remove_self();
}
// https://dom.spec.whatwg.org/#dom-element-matches
fn Matches(&self, selectors: DOMString) -> Fallible<bool> {
match SelectorParser::parse_author_origin_no_namespace(&selectors) {
Err(_) => Err(Error::Syntax),
Ok(selectors) => {
let quirks_mode = document_from_node(self).quirks_mode();
let mut ctx = MatchingContext::new(MatchingMode::Normal, None,
quirks_mode);
Ok(matches_selector_list(&selectors, &Root::from_ref(self), &mut ctx))
}
}
}
// https://dom.spec.whatwg.org/#dom-element-webkitmatchesselector
fn WebkitMatchesSelector(&self, selectors: DOMString) -> Fallible<bool> {
self.Matches(selectors)
}
// https://dom.spec.whatwg.org/#dom-element-closest
fn Closest(&self, selectors: DOMString) -> Fallible<Option<Root<Element>>> {
match SelectorParser::parse_author_origin_no_namespace(&selectors) {
Err(_) => Err(Error::Syntax),
Ok(selectors) => {
let root = self.upcast::<Node>();
for element in root.inclusive_ancestors() {
if let Some(element) = Root::downcast::<Element>(element) {
let quirks_mode = document_from_node(self).quirks_mode();
let mut ctx = MatchingContext::new(MatchingMode::Normal, None,
quirks_mode);
if matches_selector_list(&selectors, &element, &mut ctx) {
return Ok(Some(element));
}
}
}
Ok(None)
}
}
}
// https://dom.spec.whatwg.org/#dom-element-insertadjacentelement
fn InsertAdjacentElement(&self, where_: DOMString, element: &Element)
-> Fallible<Option<Root<Element>>> {
let where_ = AdjacentPosition::try_from(&*where_)?;
let inserted_node = self.insert_adjacent(where_, element.upcast())?;
Ok(inserted_node.map(|node| Root::downcast(node).unwrap()))
}
// https://dom.spec.whatwg.org/#dom-element-insertadjacenttext
fn InsertAdjacentText(&self, where_: DOMString, data: DOMString)
-> ErrorResult {
// Step 1.
let text = Text::new(data, &document_from_node(self));
// Step 2.
let where_ = AdjacentPosition::try_from(&*where_)?;
self.insert_adjacent(where_, text.upcast()).map(|_| ())
}
// https://w3c.github.io/DOM-Parsing/#dom-element-insertadjacenthtml
fn InsertAdjacentHTML(&self, position: DOMString, text: DOMString)
-> ErrorResult {
// Step 1.
let position = AdjacentPosition::try_from(&*position)?;
let context = match position {
AdjacentPosition::BeforeBegin | AdjacentPosition::AfterEnd => {
match self.upcast::<Node>().GetParentNode() {
Some(ref node) if node.is::<Document>() => {
return Err(Error::NoModificationAllowed)
}
None => return Err(Error::NoModificationAllowed),
Some(node) => node,
}
}
AdjacentPosition::AfterBegin | AdjacentPosition::BeforeEnd => {
Root::from_ref(self.upcast::<Node>())
}
};
// Step 2.
let context = Element::fragment_parsing_context(
&context.owner_doc(), context.downcast::<Element>());
// Step 3.
let fragment = context.parse_fragment(text)?;
// Step 4.
self.insert_adjacent(position, fragment.upcast()).map(|_| ())
}
// check-tidy: no specs after this line
fn EnterFormalActivationState(&self) -> ErrorResult {
match self.as_maybe_activatable() {
Some(a) => {
a.enter_formal_activation_state();
return Ok(());
},
None => return Err(Error::NotSupported)
}
}
fn ExitFormalActivationState(&self) -> ErrorResult {
match self.as_maybe_activatable() {
Some(a) => {
a.exit_formal_activation_state();
return Ok(());
},
None => return Err(Error::NotSupported)
}
}
// https://fullscreen.spec.whatwg.org/#dom-element-requestfullscreen
#[allow(unrooted_must_root)]
fn RequestFullscreen(&self) -> Rc<Promise> {
let doc = document_from_node(self);
doc.enter_fullscreen(self)
}
}
impl VirtualMethods for Element {
fn super_type(&self) -> Option<&VirtualMethods> {
Some(self.upcast::<Node>() as &VirtualMethods)
}
fn attribute_mutated(&self, attr: &Attr, mutation: AttributeMutation) {
self.super_type().unwrap().attribute_mutated(attr, mutation);
let node = self.upcast::<Node>();
let doc = node.owner_doc();
match attr.local_name() {
&local_name!("style") => {
// Modifying the `style` attribute might change style.
*self.style_attribute.borrow_mut() = match mutation {
AttributeMutation::Set(..) => {
// This is the fast path we use from
// CSSStyleDeclaration.
//
// Juggle a bit to keep the borrow checker happy
// while avoiding the extra clone.
let is_declaration = match *attr.value() {
AttrValue::Declaration(..) => true,
_ => false,
};
let block = if is_declaration {
let mut value = AttrValue::String(String::new());
attr.swap_value(&mut value);
let (serialization, block) = match value {
AttrValue::Declaration(s, b) => (s, b),
_ => unreachable!(),
};
let mut value = AttrValue::String(serialization);
attr.swap_value(&mut value);
block
} else {
let win = window_from_node(self);
Arc::new(doc.style_shared_lock().wrap(parse_style_attribute(
&attr.value(),
&doc.base_url(),
win.css_error_reporter(),
doc.quirks_mode())))
};
Some(block)
}
AttributeMutation::Removed => {
None
}
};
},
&local_name!("id") => {
*self.id_attribute.borrow_mut() =
mutation.new_value(attr).and_then(|value| {
let value = value.as_atom();
if value != &atom!("") {
Some(value.clone())
} else {
None
}
});
if node.is_in_doc() {
let value = attr.value().as_atom().clone();
match mutation {
AttributeMutation::Set(old_value) => {
if let Some(old_value) = old_value {
let old_value = old_value.as_atom().clone();
doc.unregister_named_element(self, old_value);
}
if value != atom!("") {
doc.register_named_element(self, value);
}
},
AttributeMutation::Removed => {
if value != atom!("") {
doc.unregister_named_element(self, value);
}
}
}
}
},
_ => {
// FIXME(emilio): This is pretty dubious, and should be done in
// the relevant super-classes.
if attr.namespace() == &ns!() &&
attr.local_name() == &local_name!("src") {
node.dirty(NodeDamage::OtherNodeDamage);
}
},
};
// Make sure we rev the version even if we didn't dirty the node. If we
// don't do this, various attribute-dependent htmlcollections (like those
// generated by getElementsByClassName) might become stale.
node.rev_version();
}
fn parse_plain_attribute(&self, name: &LocalName, value: DOMString) -> AttrValue {
match name {
&local_name!("id") => AttrValue::from_atomic(value.into()),
&local_name!("class") => AttrValue::from_serialized_tokenlist(value.into()),
_ => self.super_type().unwrap().parse_plain_attribute(name, value),
}
}
fn bind_to_tree(&self, tree_in_doc: bool) {
if let Some(ref s) = self.super_type() {
s.bind_to_tree(tree_in_doc);
}
if let Some(f) = self.as_maybe_form_control() {
f.bind_form_control_to_tree();
}
if !tree_in_doc {
return;
}
let doc = document_from_node(self);
if let Some(ref value) = *self.id_attribute.borrow() {
doc.register_named_element(self, value.clone());
}
// This is used for layout optimization.
doc.increment_dom_count();
}
fn unbind_from_tree(&self, context: &UnbindContext) {
self.super_type().unwrap().unbind_from_tree(context);
if let Some(f) = self.as_maybe_form_control() {
f.unbind_form_control_from_tree();
}
if !context.tree_in_doc {
return;
}
let doc = document_from_node(self);
let fullscreen = doc.GetFullscreenElement();
if fullscreen.r() == Some(self) {
doc.exit_fullscreen();
}
if let Some(ref value) = *self.id_attribute.borrow() {
doc.unregister_named_element(self, value.clone());
}
// This is used for layout optimization.
doc.decrement_dom_count();
}
fn children_changed(&self, mutation: &ChildrenMutation) {
if let Some(ref s) = self.super_type() {
s.children_changed(mutation);
}
let flags = self.selector_flags.get();
if flags.intersects(HAS_SLOW_SELECTOR) {
// All children of this node need to be restyled when any child changes.
self.upcast::<Node>().dirty(NodeDamage::OtherNodeDamage);
} else {
if flags.intersects(HAS_SLOW_SELECTOR_LATER_SIBLINGS) {
if let Some(next_child) = mutation.next_child() {
for child in next_child.inclusively_following_siblings() {
if child.is::<Element>() {
child.dirty(NodeDamage::OtherNodeDamage);
}
}
}
}
if flags.intersects(HAS_EDGE_CHILD_SELECTOR) {
if let Some(child) = mutation.modified_edge_element() {
child.dirty(NodeDamage::OtherNodeDamage);
}
}
}
}
fn adopting_steps(&self, old_doc: &Document) {
self.super_type().unwrap().adopting_steps(old_doc);
if document_from_node(self).is_html_document() != old_doc.is_html_document() {
self.tag_name.clear();
}
}
}
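// Glue that lets the selectors crate run CSS selector matching directly
// against script-side elements; this backs querySelector(All), matches(),
// and closest().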
impl<'a> ::selectors::Element for Root<Element> {
type Impl = SelectorImpl;
fn parent_element(&self) -> Option<Root<Element>> {
self.upcast::<Node>().GetParentElement()
}
fn match_pseudo_element(&self,
_pseudo: &PseudoElement,
_context: &mut MatchingContext)
-> bool
{
false
}
fn first_child_element(&self) -> Option<Root<Element>> {
self.node.child_elements().next()
}
fn last_child_element(&self) -> Option<Root<Element>> {
self.node.rev_children().filter_map(Root::downcast).next()
}
fn prev_sibling_element(&self) -> Option<Root<Element>> {
self.node.preceding_siblings().filter_map(Root::downcast).next()
}
fn next_sibling_element(&self) -> Option<Root<Element>> {
self.node.following_siblings().filter_map(Root::downcast).next()
}
fn attr_matches(&self,
ns: &NamespaceConstraint<&Namespace>,
local_name: &LocalName,
operation: &AttrSelectorOperation<&String>)
-> bool {
match *ns {
NamespaceConstraint::Specific(ref ns) => {
self.get_attribute(ns, local_name)
.map_or(false, |attr| attr.value().eval_selector(operation))
}
NamespaceConstraint::Any => {
self.attrs.borrow().iter().any(|attr| {
attr.local_name() == local_name &&
attr.value().eval_selector(operation)
})
}
}
}
fn is_root(&self) -> bool {
match self.node.GetParentNode() {
None => false,
Some(node) => node.is::<Document>(),
}
}
fn is_empty(&self) -> bool {
self.node.children().all(|node| !node.is::<Element>() && match node.downcast::<Text>() {
None => true,
Some(text) => text.upcast::<CharacterData>().data().is_empty()
})
}
fn get_local_name(&self) -> &LocalName {
self.local_name()
}
fn get_namespace(&self) -> &Namespace {
self.namespace()
}
fn match_non_ts_pseudo_class<F>(&self,
pseudo_class: &NonTSPseudoClass,
_: &mut LocalMatchingContext<Self::Impl>,
_: &RelevantLinkStatus,
_: &mut F)
-> bool
where F: FnMut(&Self, ElementSelectorFlags),
{
match *pseudo_class {
// https://github.com/servo/servo/issues/8718
NonTSPseudoClass::Link |
NonTSPseudoClass::AnyLink => self.is_link(),
NonTSPseudoClass::Visited => false,
NonTSPseudoClass::ServoNonZeroBorder => {
match self.downcast::<HTMLTableElement>() {
None => false,
Some(this) => {
match this.get_border() {
None | Some(0) => false,
Some(_) => true,
}
}
}
},
NonTSPseudoClass::ServoCaseSensitiveTypeAttr(ref expected_value) => {
self.get_attribute(&ns!(), &local_name!("type"))
.map_or(false, |attr| attr.value().eq(expected_value))
}
// FIXME(heycam): This is wrong, since extended_filtering accepts
// a string containing commas (separating each language tag in
// a list) but the pseudo-class instead should be parsing and
// storing separate <ident> or <string>s for each language tag.
NonTSPseudoClass::Lang(ref lang) => extended_filtering(&*self.get_lang(), &*lang),
NonTSPseudoClass::ReadOnly =>
!Element::state(self).contains(pseudo_class.state_flag()),
NonTSPseudoClass::Active |
NonTSPseudoClass::Focus |
NonTSPseudoClass::Fullscreen |
NonTSPseudoClass::Hover |
NonTSPseudoClass::Enabled |
NonTSPseudoClass::Disabled |
NonTSPseudoClass::Checked |
NonTSPseudoClass::Indeterminate |
NonTSPseudoClass::ReadWrite |
NonTSPseudoClass::PlaceholderShown |
NonTSPseudoClass::Target =>
Element::state(self).contains(pseudo_class.state_flag()),
}
}
fn is_link(&self) -> bool {
// FIXME: This is HTML only.
let node = self.upcast::<Node>();
match node.type_id() {
// https://html.spec.whatwg.org/multipage/#selector-link
NodeTypeId::Element(ElementTypeId::HTMLElement(HTMLElementTypeId::HTMLAnchorElement)) |
NodeTypeId::Element(ElementTypeId::HTMLElement(HTMLElementTypeId::HTMLAreaElement)) |
NodeTypeId::Element(ElementTypeId::HTMLElement(HTMLElementTypeId::HTMLLinkElement)) => {
self.has_attribute(&local_name!("href"))
},
_ => false,
}
}
fn has_id(&self, id: &Atom, case_sensitivity: CaseSensitivity) -> bool {
self.id_attribute.borrow().as_ref().map_or(false, |atom| case_sensitivity.eq_atom(id, atom))
}
fn has_class(&self, name: &Atom, case_sensitivity: CaseSensitivity) -> bool {
Element::has_class(&**self, name, case_sensitivity)
}
fn is_html_element_in_html_document(&self) -> bool {
self.html_element_in_html_document()
}
}
impl Element {
pub fn as_maybe_activatable(&self) -> Option<&Activatable> {
let element = match self.upcast::<Node>().type_id() {
NodeTypeId::Element(ElementTypeId::HTMLElement(HTMLElementTypeId::HTMLInputElement)) => {
let element = self.downcast::<HTMLInputElement>().unwrap();
Some(element as &Activatable)
},
NodeTypeId::Element(ElementTypeId::HTMLElement(HTMLElementTypeId::HTMLButtonElement)) => {
let element = self.downcast::<HTMLButtonElement>().unwrap();
Some(element as &Activatable)
},
NodeTypeId::Element(ElementTypeId::HTMLElement(HTMLElementTypeId::HTMLAnchorElement)) => {
let element = self.downcast::<HTMLAnchorElement>().unwrap();
Some(element as &Activatable)
},
NodeTypeId::Element(ElementTypeId::HTMLElement(HTMLElementTypeId::HTMLLabelElement)) => {
let element = self.downcast::<HTMLLabelElement>().unwrap();
Some(element as &Activatable)
},
_ => {
None
}
};
element.and_then(|elem| {
if elem.is_instance_activatable() {
Some(elem)
} else {
None
}
})
}
pub fn as_stylesheet_owner(&self) -> Option<&StylesheetOwner> {
if let Some(s) = self.downcast::<HTMLStyleElement>() {
return Some(s as &StylesheetOwner)
}
if let Some(l) = self.downcast::<HTMLLinkElement>() {
return Some(l as &StylesheetOwner)
}
None
}
// https://html.spec.whatwg.org/multipage/#category-submit
pub fn as_maybe_validatable(&self) -> Option<&Validatable> {
let element = match self.upcast::<Node>().type_id() {
NodeTypeId::Element(ElementTypeId::HTMLElement(HTMLElementTypeId::HTMLInputElement)) => {
let element = self.downcast::<HTMLInputElement>().unwrap();
Some(element as &Validatable)
},
NodeTypeId::Element(ElementTypeId::HTMLElement(HTMLElementTypeId::HTMLButtonElement)) => {
let element = self.downcast::<HTMLButtonElement>().unwrap();
Some(element as &Validatable)
},
NodeTypeId::Element(ElementTypeId::HTMLElement(HTMLElementTypeId::HTMLObjectElement)) => {
let element = self.downcast::<HTMLObjectElement>().unwrap();
Some(element as &Validatable)
},
NodeTypeId::Element(ElementTypeId::HTMLElement(HTMLElementTypeId::HTMLSelectElement)) => {
let element = self.downcast::<HTMLSelectElement>().unwrap();
Some(element as &Validatable)
},
NodeTypeId::Element(ElementTypeId::HTMLElement(HTMLElementTypeId::HTMLTextAreaElement)) => {
let element = self.downcast::<HTMLTextAreaElement>().unwrap();
Some(element as &Validatable)
},
_ => {
None
}
};
element
}
pub fn click_in_progress(&self) -> bool {
self.upcast::<Node>().get_flag(CLICK_IN_PROGRESS)
}
pub fn set_click_in_progress(&self, click: bool) {
self.upcast::<Node>().set_flag(CLICK_IN_PROGRESS, click)
}
// https://html.spec.whatwg.org/multipage/#nearest-activatable-element
pub fn nearest_activable_element(&self) -> Option<Root<Element>> {
match self.as_maybe_activatable() {
Some(el) => Some(Root::from_ref(el.as_element())),
None => {
let node = self.upcast::<Node>();
for node in node.ancestors() {
if let Some(node) = node.downcast::<Element>() {
if node.as_maybe_activatable().is_some() {
return Some(Root::from_ref(node));
}
}
}
None
}
}
}
/// Please call this method *only* for real click events
///
/// https://html.spec.whatwg.org/multipage/#run-authentic-click-activation-steps
///
/// Use an element's synthetic click activation (or handle_event) for any script-triggered clicks.
/// If the spec says otherwise, check with Manishearth first
pub fn authentic_click_activation(&self, event: &Event) {
// Not explicitly part of the spec, however this helps enforce the invariants
// required to save state between pre-activation and post-activation
// since we cannot nest authentic clicks (unlike synthetic click activation, where
// the script can generate more click events from the handler)
assert!(!self.click_in_progress());
let target = self.upcast();
// Step 2 (requires canvas support)
// Step 3
self.set_click_in_progress(true);
// Step 4
let e = self.nearest_activable_element();
match e {
Some(ref el) => match el.as_maybe_activatable() {
Some(elem) => {
// Step 5-6
elem.pre_click_activation();
event.fire(target);
if !event.DefaultPrevented() {
// post click activation
elem.activation_behavior(event, target);
} else {
elem.canceled_activation();
}
}
// Step 6
None => {
event.fire(target);
}
},
// Step 6
None => {
event.fire(target);
}
}
// Step 7
self.set_click_in_progress(false);
}
// https://html.spec.whatwg.org/multipage/#language
pub fn get_lang(&self) -> String {
self.upcast::<Node>().inclusive_ancestors().filter_map(|node| {
node.downcast::<Element>().and_then(|el| {
el.get_attribute(&ns!(xml), &local_name!("lang")).or_else(|| {
el.get_attribute(&ns!(), &local_name!("lang"))
}).map(|attr| String::from(attr.Value()))
})
// TODO: Check meta tags for a pragma-set default language
// TODO: Check HTTP Content-Language header
}).next().unwrap_or(String::new())
}
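    // Lookup order illustrated: for `<div xml:lang="fr"><p lang="en">...</p></div>`,
    // get_lang() on the <p> yields "en"; the nearest inclusive ancestor wins, and
    // xml:lang is consulted before lang on each element.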
pub fn state(&self) -> ElementState {
self.state.get()
}
pub fn set_state(&self, which: ElementState, value: bool) {
let mut state = self.state.get();
if state.contains(which) == value {
return;
}
let node = self.upcast::<Node>();
node.owner_doc().element_state_will_change(self);
if value {
state.insert(which);
} else {
state.remove(which);
}
self.state.set(state);
}
pub fn active_state(&self) -> bool {
self.state.get().contains(IN_ACTIVE_STATE)
}
/// https://html.spec.whatwg.org/multipage/#concept-selector-active
pub fn set_active_state(&self, value: bool) {
self.set_state(IN_ACTIVE_STATE, value);
if let Some(parent) = self.upcast::<Node>().GetParentElement() {
parent.set_active_state(value);
}
}
pub fn focus_state(&self) -> bool {
self.state.get().contains(IN_FOCUS_STATE)
}
pub fn set_focus_state(&self, value: bool) {
self.set_state(IN_FOCUS_STATE, value);
self.upcast::<Node>().dirty(NodeDamage::OtherNodeDamage);
}
pub fn hover_state(&self) -> bool {
self.state.get().contains(IN_HOVER_STATE)
}
pub fn set_hover_state(&self, value: bool) {
self.set_state(IN_HOVER_STATE, value)
}
pub fn enabled_state(&self) -> bool {
self.state.get().contains(IN_ENABLED_STATE)
}
pub fn set_enabled_state(&self, value: bool) {
self.set_state(IN_ENABLED_STATE, value)
}
pub fn disabled_state(&self) -> bool {
self.state.get().contains(IN_DISABLED_STATE)
}
pub fn set_disabled_state(&self, value: bool) {
self.set_state(IN_DISABLED_STATE, value)
}
pub fn read_write_state(&self) -> bool {
self.state.get().contains(IN_READ_WRITE_STATE)
}
pub fn set_read_write_state(&self, value: bool) {
self.set_state(IN_READ_WRITE_STATE, value)
}
pub fn placeholder_shown_state(&self) -> bool {
self.state.get().contains(IN_PLACEHOLDER_SHOWN_STATE)
}
pub fn set_placeholder_shown_state(&self, value: bool) {
if self.placeholder_shown_state() != value {
self.set_state(IN_PLACEHOLDER_SHOWN_STATE, value);
self.upcast::<Node>().dirty(NodeDamage::OtherNodeDamage);
}
}
pub fn target_state(&self) -> bool {
self.state.get().contains(IN_TARGET_STATE)
}
pub fn set_target_state(&self, value: bool) {
self.set_state(IN_TARGET_STATE, value)
}
pub fn fullscreen_state(&self) -> bool {
self.state.get().contains(IN_FULLSCREEN_STATE)
}
pub fn set_fullscreen_state(&self, value: bool) {
self.set_state(IN_FULLSCREEN_STATE, value)
}
/// https://dom.spec.whatwg.org/#connected
pub fn is_connected(&self) -> bool {
let node = self.upcast::<Node>();
let root = node.GetRootNode();
root.is::<Document>()
}
}
impl Element {
pub fn check_ancestors_disabled_state_for_form_control(&self) {
let node = self.upcast::<Node>();
if self.disabled_state() {
return;
}
for ancestor in node.ancestors() {
if !ancestor.is::<HTMLFieldSetElement>() {
continue;
}
if !ancestor.downcast::<Element>().unwrap().disabled_state() {
continue;
}
if ancestor.is_parent_of(node) {
self.set_disabled_state(true);
self.set_enabled_state(false);
return;
}
if let Some(ref legend) = ancestor.children().find(|n| n.is::<HTMLLegendElement>()) {
// XXXabinader: should we save previous ancestor to avoid this iteration?
if node.ancestors().any(|ancestor| ancestor == *legend) {
continue;
}
}
self.set_disabled_state(true);
self.set_enabled_state(false);
return;
}
}
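    // Rule illustrated: a control inside a disabled <fieldset> becomes disabled
    // unless it lives inside that fieldset's first <legend> child. For example, in
    // `<fieldset disabled><legend><input></legend><input></fieldset>` the first
    // <input> stays enabled and the second is disabled.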
pub fn check_parent_disabled_state_for_option(&self) {
if self.disabled_state() {
return;
}
let node = self.upcast::<Node>();
if let Some(ref parent) = node.GetParentNode() {
if parent.is::<HTMLOptGroupElement>() &&
parent.downcast::<Element>().unwrap().disabled_state() {
self.set_disabled_state(true);
self.set_enabled_state(false);
}
}
}
pub fn check_disabled_attribute(&self) {
let has_disabled_attrib = self.has_attribute(&local_name!("disabled"));
self.set_disabled_state(has_disabled_attrib);
self.set_enabled_state(!has_disabled_attrib);
}
}
#[derive(Clone, Copy)]
pub enum AttributeMutation<'a> {
/// The attribute is set, keep track of old value.
/// https://dom.spec.whatwg.org/#attribute-is-set
Set(Option<&'a AttrValue>),
/// The attribute is removed.
/// https://dom.spec.whatwg.org/#attribute-is-removed
Removed,
}
impl<'a> AttributeMutation<'a> {
pub fn is_removal(&self) -> bool {
match *self {
AttributeMutation::Removed => true,
AttributeMutation::Set(..) => false,
}
}
pub fn new_value<'b>(&self, attr: &'b Attr) -> Option<Ref<'b, AttrValue>> {
match *self {
AttributeMutation::Set(_) => Some(attr.value()),
AttributeMutation::Removed => None,
}
}
}
/// A holder for an element's "tag name", which will be lazily
/// resolved and cached. Should be reset when the document
/// owner changes.
#[derive(JSTraceable, HeapSizeOf)]
struct TagName {
ptr: DOMRefCell<Option<LocalName>>,
}
impl TagName {
fn new() -> TagName {
TagName { ptr: DOMRefCell::new(None) }
}
/// Retrieve a copy of the current inner value. If it is `None`, it is
/// initialized with the result of `cb` first.
fn or_init<F>(&self, cb: F) -> LocalName
where F: FnOnce() -> LocalName
{
match &mut *self.ptr.borrow_mut() {
&mut Some(ref name) => name.clone(),
ptr => {
let name = cb();
*ptr = Some(name.clone());
name
}
}
}
/// Clear the cached tag name, so that it will be re-calculated the
/// next time that `or_init()` is called.
fn clear(&self) {
*self.ptr.borrow_mut() = None;
}
}
pub struct ElementPerformFullscreenEnter {
element: Trusted<Element>,
promise: TrustedPromise,
error: bool,
}
impl ElementPerformFullscreenEnter {
pub fn new(element: Trusted<Element>, promise: TrustedPromise, error: bool) -> Box<ElementPerformFullscreenEnter> {
box ElementPerformFullscreenEnter {
element: element,
promise: promise,
error: error,
}
}
}
impl Runnable for ElementPerformFullscreenEnter {
fn name(&self) -> &'static str { "ElementPerformFullscreenEnter" }
#[allow(unrooted_must_root)]
fn handler(self: Box<ElementPerformFullscreenEnter>) {
let element = self.element.root();
let document = document_from_node(element.r());
// Step 7.1
if self.error || !element.fullscreen_element_ready_check() {
// JSAutoCompartment needs to be manually made.
// Otherwise, Servo will crash.
let promise = self.promise.root();
let promise_cx = promise.global().get_cx();
let _ac = JSAutoCompartment::new(promise_cx, promise.reflector().get_jsobject().get());
document.upcast::<EventTarget>().fire_event(atom!("fullscreenerror"));
promise.reject_error(promise.global().get_cx(), Error::Type(String::from("fullscreen is not connected")));
return
}
// TODO Step 7.2-4
// Step 7.5
element.set_fullscreen_state(true);
document.set_fullscreen_element(Some(&element));
document.window().reflow(ReflowGoal::ForDisplay,
ReflowQueryType::NoQuery,
ReflowReason::ElementStateChanged);
// Step 7.6
document.upcast::<EventTarget>().fire_event(atom!("fullscreenchange"));
// Step 7.7
// JSAutoCompartment needs to be manually made.
// Otherwise, Servo will crash.
let promise = self.promise.root();
let promise_cx = promise.global().get_cx();
let _ac = JSAutoCompartment::new(promise_cx, promise.reflector().get_jsobject().get());
promise.resolve(promise.global().get_cx(), HandleValue::undefined());
}
}
pub struct ElementPerformFullscreenExit {
element: Trusted<Element>,
promise: TrustedPromise,
}
impl ElementPerformFullscreenExit {
pub fn new(element: Trusted<Element>, promise: TrustedPromise) -> Box<ElementPerformFullscreenExit> {
box ElementPerformFullscreenExit {
element: element,
promise: promise,
}
}
}
impl Runnable for ElementPerformFullscreenExit {
fn name(&self) -> &'static str { "ElementPerformFullscreenExit" }
#[allow(unrooted_must_root)]
fn handler(self: Box<ElementPerformFullscreenExit>) {
let element = self.element.root();
let document = document_from_node(element.r());
// TODO Step 9.1-5
// Step 9.6
element.set_fullscreen_state(false);
document.window().reflow(ReflowGoal::ForDisplay,
ReflowQueryType::NoQuery,
ReflowReason::ElementStateChanged);
document.set_fullscreen_element(None);
// Step 9.8
document.upcast::<EventTarget>().fire_event(atom!("fullscreenchange"));
// Step 9.10
let promise = self.promise.root();
// JSAutoCompartment needs to be manually made.
// Otherwise, Servo will crash.
let promise_cx = promise.global().get_cx();
let _ac = JSAutoCompartment::new(promise_cx, promise.reflector().get_jsobject().get());
promise.resolve(promise.global().get_cx(), HandleValue::undefined());
}
}
pub fn reflect_cross_origin_attribute(element: &Element) -> Option<DOMString> {
let attr = element.get_attribute(&ns!(), &local_name!("crossorigin"));
if let Some(mut val) = attr.map(|v| v.Value()) {
val.make_ascii_lowercase();
if val == "anonymous" || val == "use-credentials" {
return Some(val);
}
return Some(DOMString::from("anonymous"));
}
None
}
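// Reflection behavior of the helper above, by example: crossorigin="use-credentials"
// reflects as Some("use-credentials"); an unknown value such as crossorigin="bogus"
// falls back to Some("anonymous"), the invalid value default; a missing attribute
// reflects as None.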
pub fn set_cross_origin_attribute(element: &Element, value: Option<DOMString>) {
match value {
Some(val) => element.set_string_attribute(&local_name!("crossorigin"), val),
None => {
element.remove_attribute(&ns!(), &local_name!("crossorigin"));
}
}
}
pub fn cors_setting_for_element(element: &Element) -> Option<CorsSettings> {
reflect_cross_origin_attribute(element).map_or(None, |attr| {
match &*attr {
"anonymous" => Some(CorsSettings::Anonymous),
"use-credentials" => Some(CorsSettings::UseCredentials),
_ => unreachable!()
}
})
}<|fim▁end|> | local_name!("menuitem") | local_name!("meta") | |
<|file_name|>cacheDevice.js<|end_file_name|><|fim▁begin|>/**
* Created by quanpower on 14-8-20.
*/
var config = require('./../config');
var redis = require('./redis');
var _ = require('lodash');
function cacheDevice(device){
if(device){
var cloned = _.clone(device);
redis.set('DEVICE_' + device.uuid, JSON.stringify(cloned),function(){
        //console.log('cached', device.uuid);
});
}
}
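// Usage sketch (hypothetical caller and device shape):
//
//   var cacheDevice = require('./cacheDevice');
//   cacheDevice({ uuid: 'abc-123', online: true });
//   // stored in redis under the key 'DEVICE_abc-123' when config.redis is set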
function noop(){}
if(config.redis){
module.exports = cacheDevice;
}
else{
module.exports = noop;<|fim▁hole|><|fim▁end|> | } |
<|file_name|>issue-41868.rs<|end_file_name|><|fim▁begin|>// check-pass
// Defaulted assoc. types should normalize properly in impls that don't
// override them.
#![feature(associated_type_defaults)]
pub struct Foo;
<|fim▁hole|> fn read(rdr: &mut Foo) -> Option<Self::Output>;
}
impl CanDecode for u8 {
fn read(rdr: &mut Foo) -> Option<Self::Output> { Some(42) }
}
impl CanDecode for u16 {
fn read(rdr: &mut Foo) -> Option<u16> { Some(17) }
}
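// Why this compiles: `Output` defaults to `Self`, so `<u8 as CanDecode>::Output`
// normalizes to `u8`, making the first impl's `Option<Self::Output>` and the
// second impl's spelled-out `Option<u16>` the same shape to the type checker.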
fn main() {}<|fim▁end|> | pub trait CanDecode: Sized {
type Output = Self; |
<|file_name|>mv_sink.js<|end_file_name|><|fim▁begin|>function glitch_frame(frame)
{
// bail out if we have no motion vectors
let mvs = frame["mv"];
if ( !mvs )
return;
// bail out if we have no forward motion vectors
let fwd_mvs = mvs["forward"];
<|fim▁hole|> return;
// clear horizontal element of all motion vectors
for ( let i = 0; i < fwd_mvs.length; i++ )
{
// loop through all rows
let row = fwd_mvs[i];
for ( let j = 0; j < row.length; j++ )
{
// loop through all macroblocks
let mv = row[j];
// THIS IS WHERE THE MAGIC HAPPENS
mv[0] = 0; // this sets the horizontal motion vector to zero
// mv[1] = 0; // you could also change the vertical motion vector
}
}
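    // The same walk applies to backward-predicted vectors where the stream
    // provides them (a sketch: start from `let bwd_mvs = mvs["backward"];`).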
}<|fim▁end|> | if ( !fwd_mvs )
|
<|file_name|>main.rs<|end_file_name|><|fim▁begin|>// Copyright 2020 The Exonum Team
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
//! Simple timestamping service implementation.
use exonum::{
crypto::KeyPair,
merkledb::ObjectHash,
runtime::{ExecutionContext, ExecutionError, SnapshotExt},
};
use exonum_derive::{exonum_interface, ServiceDispatcher, ServiceFactory};
use exonum_rust_runtime::{spec::Spec, Service};
use exonum_testkit::TestKitBuilder;
#[exonum_interface(auto_ids)]
trait TimestampingInterface<Ctx> {
type Output;
fn timestamp(&self, ctx: Ctx, arg: String) -> Self::Output;
}<|fim▁hole|>
#[derive(Debug, ServiceDispatcher, ServiceFactory)]
#[service_factory(artifact_name = "timestamping", artifact_version = "1.0.0")]
#[service_dispatcher(implements("TimestampingInterface"))]
struct TimestampingService;
impl TimestampingInterface<ExecutionContext<'_>> for TimestampingService {
type Output = Result<(), ExecutionError>;
fn timestamp(&self, _ctx: ExecutionContext<'_>, _arg: String) -> Self::Output {
Ok(())
}
}
impl Service for TimestampingService {}
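// Wiring note (summary): `#[exonum_interface(auto_ids)]` assigns `timestamp`
// transaction id 0, and the derive macros above generate the dispatch glue, so
// the `keypair.timestamp(instance_id, ...)` calls in `main` produce signed
// transactions for the "timestamping" instance without hand-written codecs.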
fn main() {
let instance_id = 512;
// Create a testkit for a network with four validators.
let service = TimestampingService;
let mut testkit = TestKitBuilder::validator()
.with_validators(4)
.with(Spec::new(service).with_instance(instance_id, "timestamping", ()))
.build();
// Create few transactions.
let keypair = KeyPair::random();
let tx1 = keypair.timestamp(instance_id, "Down To Earth".to_owned());
let tx2 = keypair.timestamp(instance_id, "Cry Over Spilt Milk".to_owned());
let tx3 = keypair.timestamp(instance_id, "Dropping Like Flies".to_owned());
// Commit them into blockchain.
let block = testkit.create_block_with_transactions(vec![tx1.clone(), tx2.clone(), tx3.clone()]);
assert_eq!(block.len(), 3);
assert!(block.iter().all(|transaction| transaction.status().is_ok()));
// Check results with the core schema.
let snapshot = testkit.snapshot();
let schema = snapshot.for_core();
assert!(schema.transactions().contains(&tx1.object_hash()));
assert!(schema.transactions().contains(&tx2.object_hash()));
assert!(schema.transactions().contains(&tx3.object_hash()));
}<|fim▁end|> | |
<|file_name|>manager.py<|end_file_name|><|fim▁begin|># ==============================================================================
# Copyright (C) 2011 Diego Duclos
# Copyright (C) 2011-2018 Anton Vorobyov
#
# This file is part of Eos.
#
# Eos is free software: you can redistribute it and/or modify
# it under the terms of the GNU Lesser General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Eos is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public License
# along with Eos. If not, see <http://www.gnu.org/licenses/>.
# ==============================================================================
from logging import getLogger
from eos import __version__ as eos_version
from eos.eve_obj_builder import EveObjBuilder
from eos.util.repr import make_repr_str
from .exception import ExistingSourceError
from .exception import UnknownSourceError
from .source import Source
<|fim▁hole|>
class SourceManager:
"""Manages data sources.
Handle and access different sources in an easy way. Useful for cases when
you want to work with, for example, Tranquility and Singularity data at the
same time.
"""
# Format: {literal alias: Source}
_sources = {}
# Default source, will be used implicitly when instantiating fit
default = None
@classmethod
def add(cls, alias, data_handler, cache_handler, make_default=False):
"""Add source to source manager.
Adding includes initializing all facilities hidden behind name 'source'.
After source has been added, it is accessible with alias.
Args:
alias: Alias under which source will be accessible.
data_handler: Data handler instance.
cache_handler: Cache handler instance.
make_default (optional): Do we need to mark passed source as default
or not. Default source will be used for instantiating new fits,
if no other source is specified.
"""
logger.info('adding source with alias "{}"'.format(alias))
if alias in cls._sources:
raise ExistingSourceError(alias)
# Compare fingerprints from data and cache
cache_fp = cache_handler.get_fingerprint()
data_version = data_handler.get_version()
current_fp = cls.__format_fingerprint(data_version)
# If data version is corrupt or fingerprints mismatch, update cache
if data_version is None or cache_fp != current_fp:
if data_version is None:
logger.info('data version is None, updating cache')
else:
msg = (
'fingerprint mismatch: cache "{}", data "{}", '
'updating cache'
).format(cache_fp, current_fp)
logger.info(msg)
# Generate eve objects and cache them, as generation takes
# significant amount of time
eve_objects = EveObjBuilder.run(data_handler)
cache_handler.update_cache(eve_objects, current_fp)
# Finally, add record to list of sources
source = Source(alias=alias, cache_handler=cache_handler)
cls._sources[alias] = source
if make_default is True:
cls.default = source
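    # Usage sketch (hypothetical handler objects; anything exposing the methods
    # used above will do):
    #
    #   SourceManager.add('tq', data_handler, cache_handler, make_default=True)
    #   src = SourceManager.get('tq')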
@classmethod
def get(cls, alias):
"""Using source alias, return source.
Args:
alias: Alias of source to return.
Returns:
Source instance.
"""
try:
return cls._sources[alias]
except KeyError:
raise UnknownSourceError(alias)
@classmethod
def remove(cls, alias):
"""Remove source by alias.
Args:
alias: Alias of source to remove.
"""
logger.info('removing source with alias "{}"'.format(alias))
try:
del cls._sources[alias]
except KeyError:
raise UnknownSourceError(alias)
@classmethod
def list(cls):
return list(cls._sources.keys())
@staticmethod
def __format_fingerprint(data_version):
return '{}_{}'.format(data_version, eos_version)
@classmethod
def __repr__(cls):
spec = [['sources', '_sources']]
return make_repr_str(cls, spec)<|fim▁end|> | logger = getLogger(__name__)
|
<|file_name|>deck.repl.js<|end_file_name|><|fim▁begin|>function content($slide) {
return $slide.children().first().nextAll();
}
function addReplToSlide($, deck, $slide) {
var endpoint = $[deck]('getOptions').repl.endpoint;
content($slide).wrapAll('<div class="repl-slide-column repl-text-column"></div>');
var replHtmlId = "console-" + $slide[0].id;
$('<div/>', { id: replHtmlId, class: 'repl-slide-column repl-console-column' })
.appendTo($slide);
$('<script></script>')
.append("$(function () { newConsole('" + endpoint + "', $('#" + replHtmlId + "')); });")
.appendTo($slide);
content($slide).wrapAll('<div class="repl-slide-columns"></div>');
}
function protocol() {
switch (location.protocol) {
case 'https:': return 'wss:';
default: return 'ws:';
}
}
function url(endpoint) {
return protocol() + endpoint;
}
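// Example (assuming the deck is served over https):
//   url('//repl.example.org/console')  // -> 'wss://repl.example.org/console'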
function getContext(element) {
return element.attr('data-repl-context') || element.parents('[data-repl-context]').attr('data-repl-context');
}
function hasContext(element) {
var ctx = getContext(element);
return ctx !== undefined && ctx !== "";
}
function newConsole(endpoint, element) {
var replContext = getContext(element);
var jqconsole = element.jqconsole("", "> ");
var startPrompt;
var writeText = function(text) {
jqconsole.Write(text, 'jqconsole-output');
startPrompt();
};
var writeError = function(text) {
jqconsole.Write(text, 'jqconsole-error');
startPrompt();
    };
jqconsole.Disable();
addFullscreenHint(element);
if (endpoint) {
var connect = function () {
var ws = new WebSocket(url(endpoint));
ws.onmessage = function(event) {
jqconsole.Enable();
writeText(event.data);
};
ws.onerror = function(event) {
writeError("Connection error\n");
};
ws.onopen = function(event) {
ws.send("/load " + replContext);
};
return ws;
}
var ws = connect();<|fim▁hole|> if (input === '/reconnect') {
ws = connect();
} else if (input !== '') {
if (ws.readyState === WebSocket.OPEN) {
ws.send(input);
} else {
writeError("Not connected.");
}
}
});
};
var setup = function() {
jqconsole.RegisterShortcut('L', reset);
startPrompt();
};
var reset = function() {
var history = jqconsole.GetHistory();
jqconsole.Reset();
jqconsole.SetHistory(history);
setup();
};
setup();
} else {
startPrompt = function() {};
writeText("REPL offline.\n" +
"No livecoding for you :-(");
jqconsole.Prompt(true, function() {});
}
};
function addFullscreenHint(element) {
$('<div/>', { class: 'repl-fullscreen-hint', text: 'Fullscreen — Hit F to quit' }).appendTo(element);
}
function toggleOrder(i, order) {
switch (order) {
case '0': return '1';
default: return '0';
}
}
function isKey(e, keyValue) {
return e.which === keyValue || $.inArray(e.which, keyValue) > -1;
}
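// isKey accepts a single key code or an array of codes, matching the two option
// styles below, e.g. isKey(e, 84) or isKey(e, [84, 116]) (illustrative values).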
(function($, deck, window, undefined) {
var $d = $(document);
/*
Extends defaults/options.
options.keys.replPositionToggle
Key to toggle REPL position between left and right (right by default).
Default key is 'T'.
options.keys.replFullscreenToggle
Key to toggle REPL to fullscreen, hiding the other column and slide title.
Default key is 'F'.
options.repl.endpoint
URL of the websocket endpoint to use for REPL without the protocol part.
*/
$.extend(true, $[deck].defaults, {
classes: {
repl: 'deck-repl'
},
keys: {
replPositionToggle: 84, // t
replFullscreenToggle: 70 // f
},
repl: {
endpoint: ''
}
});
$d.bind('deck.beforeInit', function() {
if ($[deck]('getOptions').repl.endpoint) {
warnAgainstCtrlW($);
}
$.each($[deck]('getSlides'), function(i, $slide) {
if ($slide.hasClass('repl') && hasContext($slide)) {
addReplToSlide($, deck, $slide);
}
});
});
/*
jQuery.deck('toggleReplPosition')
Toggles REPL position (right column first).
*/
$[deck]('extend', 'toggleReplPosition', function() {
$('.repl-console-column').css('order', toggleOrder);
});
$[deck]('extend', 'toggleReplFullscreen', function() {
$('.deck-current .repl-slide-columns').siblings().toggle();
$('.deck-current .repl-text-column').toggle();
$('.deck-current .repl-console-column').toggleClass('repl-console-column-fullscreen');
});
$d.bind('deck.init', function() {
var opts = $[deck]('getOptions');
// Bind key events
$d.unbind('keydown.deckrepl').bind('keydown.deckrepl', function(e) {
if (isKey(e, opts.keys.replPositionToggle)) {
$[deck]('toggleReplPosition');
e.preventDefault();
}
if (isKey(e, opts.keys.replFullscreenToggle)) {
$[deck]('toggleReplFullscreen');
e.preventDefault();
}
});
});
})(jQuery, 'deck', this);
function warnAgainstCtrlW($) {
$(window).on('beforeunload', function(e) {
return 'Bad habit of deleting words with Ctrl-W? ESC to stay here.';
});
}<|fim▁end|> |
startPrompt = function () {
jqconsole.Prompt(true, function (input) { |
<|file_name|>test_verification.py<|end_file_name|><|fim▁begin|>from rest_framework import status
from rest_framework.authtoken.models import Token
from django.utils.translation import ugettext_lazy as _
from rest_framework.test import APITestCase
from django.contrib.auth import get_user_model
from django.urls import reverse
from drfpasswordless.settings import api_settings, DEFAULTS
from drfpasswordless.utils import CallbackToken
User = get_user_model()
class AliasEmailVerificationTests(APITestCase):
def setUp(self):
api_settings.PASSWORDLESS_AUTH_TYPES = ['EMAIL']
api_settings.PASSWORDLESS_EMAIL_NOREPLY_ADDRESS = '[email protected]'
api_settings.PASSWORDLESS_USER_MARK_EMAIL_VERIFIED = True
self.url = reverse('drfpasswordless:auth_email')
self.callback_url = reverse('drfpasswordless:auth_token')
self.verify_url = reverse('drfpasswordless:verify_email')
self.callback_verify = reverse('drfpasswordless:verify_token')
self.email_field_name = api_settings.PASSWORDLESS_USER_EMAIL_FIELD_NAME
self.email_verified_field_name = api_settings.PASSWORDLESS_USER_EMAIL_VERIFIED_FIELD_NAME
def test_email_unverified_to_verified_and_back(self):
email = '[email protected]'
email2 = '[email protected]'
data = {'email': email}
# create a new user
response = self.client.post(self.url, data)
self.assertEqual(response.status_code, status.HTTP_200_OK)
user = User.objects.get(**{self.email_field_name: email})
self.assertNotEqual(user, None)
self.assertEqual(getattr(user, self.email_verified_field_name), False)
# Verify a token exists for the user, sign in and check verified again
callback = CallbackToken.objects.filter(user=user, type=CallbackToken.TOKEN_TYPE_AUTH, is_active=True).first()
callback_data = {'email': email, 'token': callback}
callback_response = self.client.post(self.callback_url, callback_data)
self.assertEqual(callback_response.status_code, status.HTTP_200_OK)
# Verify we got the token, then check and see that email_verified is now verified
token = callback_response.data['token']
self.assertEqual(token, Token.objects.get(user=user).key)
        # Refresh and see that the verified field is now True
user.refresh_from_db()
self.assertEqual(getattr(user, self.email_verified_field_name), True)
# Change email, should result in flag changing to false
setattr(user, self.email_field_name, email2)
user.save()
user.refresh_from_db()
self.assertEqual(getattr(user, self.email_verified_field_name), False)
# Verify
self.client.force_authenticate(user)
verify_response = self.client.post(self.verify_url)
self.assertEqual(verify_response.status_code, status.HTTP_200_OK)
# Refresh User
user = User.objects.get(**{self.email_field_name: email2})
self.assertNotEqual(user, None)
self.assertNotEqual(getattr(user, self.email_field_name), None)
self.assertEqual(getattr(user, self.email_verified_field_name), False)
# Post callback token back.
verify_token = CallbackToken.objects.filter(user=user, type=CallbackToken.TOKEN_TYPE_VERIFY, is_active=True).first()
self.assertNotEqual(verify_token, None)
verify_callback_response = self.client.post(self.callback_verify, {'email': email2, 'token': verify_token.key})
self.assertEqual(verify_callback_response.status_code, status.HTTP_200_OK)
# Refresh User
user = User.objects.get(**{self.email_field_name: email2})
self.assertNotEqual(user, None)
self.assertNotEqual(getattr(user, self.email_field_name), None)
self.assertEqual(getattr(user, self.email_verified_field_name), True)
def tearDown(self):
api_settings.PASSWORDLESS_AUTH_TYPES = DEFAULTS['PASSWORDLESS_AUTH_TYPES']
api_settings.PASSWORDLESS_EMAIL_NOREPLY_ADDRESS = DEFAULTS['PASSWORDLESS_EMAIL_NOREPLY_ADDRESS']
        api_settings.PASSWORDLESS_USER_MARK_EMAIL_VERIFIED = DEFAULTS['PASSWORDLESS_USER_MARK_EMAIL_VERIFIED']
class AliasMobileVerificationTests(APITestCase):
def setUp(self):
api_settings.PASSWORDLESS_TEST_SUPPRESSION = True
api_settings.PASSWORDLESS_AUTH_TYPES = ['MOBILE']
api_settings.PASSWORDLESS_MOBILE_NOREPLY_NUMBER = '+15550000000'
api_settings.PASSWORDLESS_USER_MARK_MOBILE_VERIFIED = True
self.url = reverse('drfpasswordless:auth_mobile')
self.callback_url = reverse('drfpasswordless:auth_token')
self.verify_url = reverse('drfpasswordless:verify_mobile')
self.callback_verify = reverse('drfpasswordless:verify_token')
self.mobile_field_name = api_settings.PASSWORDLESS_USER_MOBILE_FIELD_NAME
self.mobile_verified_field_name = api_settings.PASSWORDLESS_USER_MOBILE_VERIFIED_FIELD_NAME
def test_mobile_unverified_to_verified_and_back(self):<|fim▁hole|> mobile = '+15551234567'
mobile2 = '+15557654321'
data = {'mobile': mobile}
# create a new user
response = self.client.post(self.url, data)
self.assertEqual(response.status_code, status.HTTP_200_OK)
user = User.objects.get(**{self.mobile_field_name: mobile})
self.assertNotEqual(user, None)
self.assertEqual(getattr(user, self.mobile_verified_field_name), False)
# Verify a token exists for the user, sign in and check verified again
callback = CallbackToken.objects.filter(user=user, type=CallbackToken.TOKEN_TYPE_AUTH, is_active=True).first()
callback_data = {'mobile': mobile, 'token': callback}
callback_response = self.client.post(self.callback_url, callback_data)
self.assertEqual(callback_response.status_code, status.HTTP_200_OK)
# Verify we got the token, then check and see that email_verified is now verified
token = callback_response.data['token']
self.assertEqual(token, Token.objects.get(user=user).key)
# Refresh and see that the endpoint is now verified as True
user.refresh_from_db()
self.assertEqual(getattr(user, self.mobile_verified_field_name), True)
# Change mobile, should result in flag changing to false
        setattr(user, self.mobile_field_name, mobile2)
user.save()
user.refresh_from_db()
self.assertEqual(getattr(user, self.mobile_verified_field_name), False)
# Verify
self.client.force_authenticate(user)
verify_response = self.client.post(self.verify_url)
self.assertEqual(verify_response.status_code, status.HTTP_200_OK)
# Refresh User
user = User.objects.get(**{self.mobile_field_name: mobile2})
self.assertNotEqual(user, None)
self.assertNotEqual(getattr(user, self.mobile_field_name), None)
self.assertEqual(getattr(user, self.mobile_verified_field_name), False)
# Post callback token back.
verify_token = CallbackToken.objects.filter(user=user, type=CallbackToken.TOKEN_TYPE_VERIFY, is_active=True).first()
self.assertNotEqual(verify_token, None)
verify_callback_response = self.client.post(self.callback_verify, {'mobile': mobile2, 'token': verify_token.key})
self.assertEqual(verify_callback_response.status_code, status.HTTP_200_OK)
# Refresh User
user = User.objects.get(**{self.mobile_field_name: mobile2})
self.assertNotEqual(user, None)
self.assertNotEqual(getattr(user, self.mobile_field_name), None)
self.assertEqual(getattr(user, self.mobile_verified_field_name), True)
def tearDown(self):
api_settings.PASSWORDLESS_TEST_SUPPRESSION = DEFAULTS['PASSWORDLESS_TEST_SUPPRESSION']
api_settings.PASSWORDLESS_AUTH_TYPES = DEFAULTS['PASSWORDLESS_AUTH_TYPES']
        api_settings.PASSWORDLESS_MOBILE_NOREPLY_NUMBER = DEFAULTS['PASSWORDLESS_MOBILE_NOREPLY_NUMBER']
api_settings.PASSWORDLESS_USER_MARK_MOBILE_VERIFIED = DEFAULTS['PASSWORDLESS_USER_MARK_MOBILE_VERIFIED']<|fim▁end|> | |
<|file_name|>config.js<|end_file_name|><|fim▁begin|>/*global define*/
/*global test*/
/*global equal*/
define(['models/config'], function (Model) {
'use strict';
module('Config model');
test('Can be created with default values', function() {
var note = new Model();
equal(note.get('name'), '', 'For default config name is empty');
equal(note.get('value'), '', 'For default config value is empty');
});
test('Update attributes', function(){
var note = new Model();<|fim▁hole|> note.set('name', 'new-config');
equal(note.get('name'), 'new-config');
equal(note.get('value'), '', 'For default config value is empty');
});
});<|fim▁end|> | |
<|file_name|>patch_ref_creator_field.py<|end_file_name|><|fim▁begin|>from apps.plus_permissions.default_agents import get_admin_user
from apps.plus_permissions.models import GenericReference
def patch():
for ref in GenericReference.objects.filter(creator=None):<|fim▁hole|>
patch()<|fim▁end|> | ref.creator = get_admin_user()
ref.save() |
<|file_name|>postupgrade.go<|end_file_name|><|fim▁begin|>/*
Copyright 2017 The Kubernetes Authors.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
package upgrade
import (
"fmt"
"io/ioutil"
"os"<|fim▁hole|> "path/filepath"
"time"
apierrors "k8s.io/apimachinery/pkg/api/errors"
metav1 "k8s.io/apimachinery/pkg/apis/meta/v1"
"k8s.io/apimachinery/pkg/util/errors"
clientset "k8s.io/client-go/kubernetes"
certutil "k8s.io/client-go/util/cert"
kubeadmapi "k8s.io/kubernetes/cmd/kubeadm/app/apis/kubeadm"
kubeadmapiv1alpha3 "k8s.io/kubernetes/cmd/kubeadm/app/apis/kubeadm/v1alpha3"
kubeadmconstants "k8s.io/kubernetes/cmd/kubeadm/app/constants"
"k8s.io/kubernetes/cmd/kubeadm/app/features"
"k8s.io/kubernetes/cmd/kubeadm/app/phases/addons/dns"
"k8s.io/kubernetes/cmd/kubeadm/app/phases/addons/proxy"
"k8s.io/kubernetes/cmd/kubeadm/app/phases/bootstraptoken/clusterinfo"
nodebootstraptoken "k8s.io/kubernetes/cmd/kubeadm/app/phases/bootstraptoken/node"
certsphase "k8s.io/kubernetes/cmd/kubeadm/app/phases/certs"
kubeletphase "k8s.io/kubernetes/cmd/kubeadm/app/phases/kubelet"
patchnodephase "k8s.io/kubernetes/cmd/kubeadm/app/phases/patchnode"
"k8s.io/kubernetes/cmd/kubeadm/app/phases/selfhosting"
"k8s.io/kubernetes/cmd/kubeadm/app/phases/uploadconfig"
"k8s.io/kubernetes/cmd/kubeadm/app/util/apiclient"
dryrunutil "k8s.io/kubernetes/cmd/kubeadm/app/util/dryrun"
"k8s.io/kubernetes/pkg/util/version"
)
var expiry = 180 * 24 * time.Hour
// PerformPostUpgradeTasks runs nearly the same functions as 'kubeadm init' would do
// Note that the markmaster phase is left out, not needed, and no token is created as that doesn't belong to the upgrade
func PerformPostUpgradeTasks(client clientset.Interface, cfg *kubeadmapi.InitConfiguration, newK8sVer *version.Version, dryRun bool) error {
errs := []error{}
// Upload currently used configuration to the cluster
// Note: This is done right in the beginning of cluster initialization; as we might want to make other phases
// depend on centralized information from this source in the future
if err := uploadconfig.UploadConfiguration(cfg, client); err != nil {
errs = append(errs, err)
}
// Create the new, version-branched kubelet ComponentConfig ConfigMap
if err := kubeletphase.CreateConfigMap(cfg, client); err != nil {
errs = append(errs, fmt.Errorf("error creating kubelet configuration ConfigMap: %v", err))
}
// Write the new kubelet config down to disk and the env file if needed
if err := writeKubeletConfigFiles(client, cfg, newK8sVer, dryRun); err != nil {
errs = append(errs, err)
}
// Annotate the node with the crisocket information, sourced either from the InitConfiguration struct or
// --cri-socket.
// TODO: In the future we want to use something more official like NodeStatus or similar for detecting this properly
if err := patchnodephase.AnnotateCRISocket(client, cfg.NodeRegistration.Name, cfg.NodeRegistration.CRISocket); err != nil {
errs = append(errs, fmt.Errorf("error uploading crisocket: %v", err))
}
// Create/update RBAC rules that makes the bootstrap tokens able to post CSRs
if err := nodebootstraptoken.AllowBootstrapTokensToPostCSRs(client); err != nil {
errs = append(errs, err)
}
// Create/update RBAC rules that makes the bootstrap tokens able to get their CSRs approved automatically
if err := nodebootstraptoken.AutoApproveNodeBootstrapTokens(client); err != nil {
errs = append(errs, err)
}
// Create/update RBAC rules that makes the nodes to rotate certificates and get their CSRs approved automatically
if err := nodebootstraptoken.AutoApproveNodeCertificateRotation(client); err != nil {
errs = append(errs, err)
}
// Upgrade to a self-hosted control plane if possible
if err := upgradeToSelfHosting(client, cfg, dryRun); err != nil {
errs = append(errs, err)
}
// TODO: Is this needed to do here? I think that updating cluster info should probably be separate from a normal upgrade
// Create the cluster-info ConfigMap with the associated RBAC rules
// if err := clusterinfo.CreateBootstrapConfigMapIfNotExists(client, kubeadmconstants.GetAdminKubeConfigPath()); err != nil {
// return err
//}
// Create/update RBAC rules that makes the cluster-info ConfigMap reachable
if err := clusterinfo.CreateClusterInfoRBACRules(client); err != nil {
errs = append(errs, err)
}
// Rotate the kube-apiserver cert and key if needed
if err := backupAPIServerCertIfNeeded(cfg, dryRun); err != nil {
errs = append(errs, err)
}
// Upgrade kube-dns/CoreDNS and kube-proxy
if err := dns.EnsureDNSAddon(cfg, client); err != nil {
errs = append(errs, err)
}
// Remove the old DNS deployment if a new DNS service is now used (kube-dns to CoreDNS or vice versa)
if err := removeOldDNSDeploymentIfAnotherDNSIsUsed(cfg, client, dryRun); err != nil {
errs = append(errs, err)
}
if err := proxy.EnsureProxyAddon(cfg, client); err != nil {
errs = append(errs, err)
}
return errors.NewAggregate(errs)
}
func removeOldDNSDeploymentIfAnotherDNSIsUsed(cfg *kubeadmapi.InitConfiguration, client clientset.Interface, dryRun bool) error {
return apiclient.TryRunCommand(func() error {
installedDeploymentName := kubeadmconstants.KubeDNS
deploymentToDelete := kubeadmconstants.CoreDNS
if features.Enabled(cfg.FeatureGates, features.CoreDNS) {
installedDeploymentName = kubeadmconstants.CoreDNS
deploymentToDelete = kubeadmconstants.KubeDNS
}
// If we're dry-running, we don't need to wait for the new DNS addon to become ready
if !dryRun {
dnsDeployment, err := client.AppsV1().Deployments(metav1.NamespaceSystem).Get(installedDeploymentName, metav1.GetOptions{})
if err != nil {
return err
}
if dnsDeployment.Status.ReadyReplicas == 0 {
return fmt.Errorf("the DNS deployment isn't ready yet")
}
}
// We don't want to wait for the DNS deployment above to become ready when dryrunning (as it never will)
// but here we should execute the DELETE command against the dryrun clientset, as it will only be logged
err := apiclient.DeleteDeploymentForeground(client, metav1.NamespaceSystem, deploymentToDelete)
if err != nil && !apierrors.IsNotFound(err) {
return err
}
return nil
}, 10)
}
func upgradeToSelfHosting(client clientset.Interface, cfg *kubeadmapi.InitConfiguration, dryRun bool) error {
if features.Enabled(cfg.FeatureGates, features.SelfHosting) && !IsControlPlaneSelfHosted(client) {
waiter := getWaiter(dryRun, client)
// kubeadm will now convert the static Pod-hosted control plane into a self-hosted one
fmt.Println("[self-hosted] Creating self-hosted control plane.")
if err := selfhosting.CreateSelfHostedControlPlane(kubeadmconstants.GetStaticPodDirectory(), kubeadmconstants.KubernetesDir, cfg, client, waiter, dryRun); err != nil {
return fmt.Errorf("error creating self hosted control plane: %v", err)
}
}
return nil
}
func backupAPIServerCertIfNeeded(cfg *kubeadmapi.InitConfiguration, dryRun bool) error {
certAndKeyDir := kubeadmapiv1alpha3.DefaultCertificatesDir
shouldBackup, err := shouldBackupAPIServerCertAndKey(certAndKeyDir)
if err != nil {
		// Don't fail the upgrade phase if we fail to determine whether to back up the kube-apiserver cert and key.
		return fmt.Errorf("[postupgrade] WARNING: failed to determine whether to back up kube-apiserver cert and key: %v", err)
}
if !shouldBackup {
return nil
}
// If dry-running, just say that this would happen to the user and exit
if dryRun {
fmt.Println("[postupgrade] Would rotate the API server certificate and key.")
return nil
}
// Don't fail the upgrade phase if failing to backup kube-apiserver cert and key, just continue rotating the cert
// TODO: We might want to reconsider this choice.
if err := backupAPIServerCertAndKey(certAndKeyDir); err != nil {
fmt.Printf("[postupgrade] WARNING: failed to backup kube-apiserver cert and key: %v", err)
}
return certsphase.CreateAPIServerCertAndKeyFiles(cfg)
}
func writeKubeletConfigFiles(client clientset.Interface, cfg *kubeadmapi.InitConfiguration, newK8sVer *version.Version, dryRun bool) error {
kubeletDir, err := getKubeletDir(dryRun)
if err != nil {
// The error here should never occur in reality, would only be thrown if /tmp doesn't exist on the machine.
return err
}
errs := []error{}
// Write the configuration for the kubelet down to disk so the upgraded kubelet can start with fresh config
if err := kubeletphase.DownloadConfig(client, newK8sVer, kubeletDir); err != nil {
// Tolerate the error being NotFound when dryrunning, as there is a pretty common scenario: the dryrun process
// *would* post the new kubelet-config-1.X configmap that doesn't exist now when we're trying to download it
// again.
if !(apierrors.IsNotFound(err) && dryRun) {
errs = append(errs, fmt.Errorf("error downloading kubelet configuration from the ConfigMap: %v", err))
}
}
if dryRun { // Print what contents would be written
dryrunutil.PrintDryRunFile(kubeadmconstants.KubeletConfigurationFileName, kubeletDir, kubeadmconstants.KubeletRunDirectory, os.Stdout)
}
envFilePath := filepath.Join(kubeadmconstants.KubeletRunDirectory, kubeadmconstants.KubeletEnvFileName)
if _, err := os.Stat(envFilePath); os.IsNotExist(err) {
// Write env file with flags for the kubelet to use. We do not need to write the --register-with-taints for the master,
// as we handle that ourselves in the markmaster phase
// TODO: Maybe we want to do that some time in the future, in order to remove some logic from the markmaster phase?
if err := kubeletphase.WriteKubeletDynamicEnvFile(&cfg.NodeRegistration, cfg.FeatureGates, false, kubeletDir); err != nil {
errs = append(errs, fmt.Errorf("error writing a dynamic environment file for the kubelet: %v", err))
}
if dryRun { // Print what contents would be written
dryrunutil.PrintDryRunFile(kubeadmconstants.KubeletEnvFileName, kubeletDir, kubeadmconstants.KubeletRunDirectory, os.Stdout)
}
}
return errors.NewAggregate(errs)
}
// getWaiter gets the right waiter implementation for the right occasion
// TODO: Consolidate this with what's in init.go?
func getWaiter(dryRun bool, client clientset.Interface) apiclient.Waiter {
if dryRun {
return dryrunutil.NewWaiter()
}
return apiclient.NewKubeWaiter(client, 30*time.Minute, os.Stdout)
}
// getKubeletDir gets the kubelet directory based on whether the user is dry-running this command or not.
// TODO: Consolidate this with similar funcs?
func getKubeletDir(dryRun bool) (string, error) {
if dryRun {
return ioutil.TempDir("", "kubeadm-upgrade-dryrun")
}
return kubeadmconstants.KubeletRunDirectory, nil
}
// backupAPIServerCertAndKey backups the old cert and key of kube-apiserver to a specified directory.
func backupAPIServerCertAndKey(certAndKeyDir string) error {
subDir := filepath.Join(certAndKeyDir, "expired")
if err := os.Mkdir(subDir, 0766); err != nil {
return fmt.Errorf("failed to created backup directory %s: %v", subDir, err)
}
filesToMove := map[string]string{
filepath.Join(certAndKeyDir, kubeadmconstants.APIServerCertName): filepath.Join(subDir, kubeadmconstants.APIServerCertName),
filepath.Join(certAndKeyDir, kubeadmconstants.APIServerKeyName): filepath.Join(subDir, kubeadmconstants.APIServerKeyName),
}
return moveFiles(filesToMove)
}
// moveFiles moves files from one directory to another.
func moveFiles(files map[string]string) error {
filesToRecover := map[string]string{}
for from, to := range files {
if err := os.Rename(from, to); err != nil {
return rollbackFiles(filesToRecover, err)
}
filesToRecover[to] = from
}
return nil
}
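// Illustrative call (hypothetical paths); if a later rename fails, files moved so
// far are restored by rollbackFiles below:
//
//	err := moveFiles(map[string]string{
//		"/pki/apiserver.crt": "/pki/expired/apiserver.crt",
//		"/pki/apiserver.key": "/pki/expired/apiserver.key",
//	})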
// rollbackFiles moves the files back to the original directory.
func rollbackFiles(files map[string]string, originalErr error) error {
errs := []error{originalErr}
for from, to := range files {
if err := os.Rename(from, to); err != nil {
errs = append(errs, err)
}
}
return fmt.Errorf("couldn't move these files: %v. Got errors: %v", files, errors.NewAggregate(errs))
}
// shouldBackupAPIServerCertAndKey checks if the cert of kube-apiserver will be expired in 180 days.
func shouldBackupAPIServerCertAndKey(certAndKeyDir string) (bool, error) {
apiServerCert := filepath.Join(certAndKeyDir, kubeadmconstants.APIServerCertName)
certs, err := certutil.CertsFromFile(apiServerCert)
if err != nil {
return false, fmt.Errorf("couldn't load the certificate file %s: %v", apiServerCert, err)
}
if len(certs) == 0 {
return false, fmt.Errorf("no certificate data found")
}
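	// Note: certificate age since NotBefore is used as a proxy for "expires within
	// 180 days"; with kubeadm's one-year serving certs the two coincide, and
	// NotAfter is not consulted directly.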
if time.Now().Sub(certs[0].NotBefore) > expiry {
return true, nil
}
return false, nil
}<|fim▁end|> | |
<|file_name|>Tw2ParticleEmitter.js<|end_file_name|><|fim▁begin|>/* eslint no-unused-vars:0 */
import {util} from '../../math';
/**
* Particle Emitter base class
*
* @property {number|string} id
* @property {string} name
* @property {Tw2ParticleSystem} particleSystem
* @class
*/
export class Tw2ParticleEmitter
{<|fim▁hole|> this._id = util.generateID();
this.name = '';
this.particleSystem = null;
}
/**
* Initializes the particle emitter
*/
Initialize()
{
}
/**
* Per frame update
* @param {number} dt - delta time
*/
Update(dt)
{
}
}<|fim▁end|> | constructor()
{ |
<|file_name|>ldap.py<|end_file_name|><|fim▁begin|># -*- coding: utf8 -*-
from flask_login import AnonymousUserMixin
from .interface import BUIhandler, BUIuser, BUIloader
from ...utils import __
import ssl
try:
from ldap3 import (
Server,
Connection,
Tls,
ALL,
RESTARTABLE,
AUTO_BIND_TLS_BEFORE_BIND,
AUTO_BIND_NONE,
SIMPLE,
)
except ImportError:
raise ImportError("Unable to load 'ldap3' module")
class LdapLoader(BUIloader):
"""The :class:`burpui.misc.auth.ldap.LdapLoader` handles searching for and
binding as a :class:`burpui.misc.auth.ldap.LdapUser` user.
"""
section = name = "LDAP:AUTH"
def __init__(self, app=None, handler=None):
""":func:`burpui.misc.auth.ldap.LdapLoader.__init__` establishes a
connection to the LDAP server.
:param app: Instance of the app we are running in
:type app: :class:`burpui.engines.server.BUIServer`
"""
self.app = app
conf = self.app.conf
handler.name = self.name
defaults = {
"LDAP:AUTH": {
"host": "localhost",
"port": None,
"encryption": None,
"binddn": None,
"bindpw": None,
"filter": None,
"base": None,
"searchattr": "uid",
"validate": "none",
"cafile": None,
}
}
mapping = {
"host": "host",
"port": "port",
"encryption": "encryption",
"filt": "filter",
"base": "base",
"attr": "searchattr",
"binddn": "binddn",
"bindpw": "bindpw",
"validate": "validate",
"cafile": "cafile",
}
conf.update_defaults(defaults)
# Maybe the handler argument is None, maybe the 'priority'
# option is missing. We don't care.
try:
handler.priority = (
conf.safe_get("priority", "integer", section=self.section)
or handler.priority
)
except:
pass
for (opt, key) in mapping.items():
setattr(self, opt, conf.safe_get(key, "force_string", section=self.section))
if self.validate and self.validate.lower() in ["none", "optional", "required"]:
self.validate = getattr(ssl, "CERT_{}".format(self.validate.upper()))
else:
self.validate = None
self.version = ssl.OP_NO_SSLv3
self.users = []
self.tls = None
self.ssl = False
self.auto_bind = AUTO_BIND_NONE
if self.encryption == "ssl":
self.ssl = True
elif self.encryption == "tls":
self.tls = Tls(
local_certificate_file=self.cafile,
validate=self.validate,
version=self.version,
)
self.auto_bind = AUTO_BIND_TLS_BEFORE_BIND
if self.port:
try:
self.port = int(self.port)
except ValueError:
self.logger.error("LDAP port must be a valid integer")
self.port = None
self.logger.info("LDAP host: {0}".format(self.host))
self.logger.info("LDAP port: {0}".format(self.port))
self.logger.info("LDAP encryption: {0}".format(self.encryption))
self.logger.info("LDAP filter: {0}".format(self.filt))
self.logger.info("LDAP base: {0}".format(self.base))
self.logger.info("LDAP search attr: {0}".format(self.attr))
self.logger.info("LDAP binddn: {0}".format(self.binddn))
self.logger.info("LDAP bindpw: {0}".format("*****" if self.bindpw else "None"))
self.logger.info("TLS object: {0}".format(self.tls))
try:
self.server = Server(
host=self.host,
port=self.port,
use_ssl=self.ssl,
get_info=ALL,
tls=self.tls,
)
self.logger.debug("LDAP Server = {0}".format(str(self.server)))
if self.binddn:
self.ldap = Connection(
self.server,
user=self.binddn,
password=self.bindpw,
raise_exceptions=True,
client_strategy=RESTARTABLE,
auto_bind=self.auto_bind,
authentication=SIMPLE,
)
else:
self.ldap = Connection(
self.server,
raise_exceptions=True,
client_strategy=RESTARTABLE,
auto_bind=self.auto_bind,
)
okay = False
with self.ldap:
self.logger.debug("LDAP Connection = {0}".format(str(self.ldap)))
self.logger.info("OK, connected to LDAP")
okay = True
if not okay:
raise Exception("Not connected")
self._prefetch()
except Exception as e:
self.logger.error("Could not connect to LDAP: {0}".format(str(e)))
self.server = None
self.ldap = None
def __exit__(self, exc_type, exc_value, traceback):
""":func:`burpui.misc.auth.ldap.LdapLoader.__exit__` closes the
connection to the LDAP server.
"""
if self.ldap and self.ldap.bound:
self.ldap.unbind()
def fetch(self, searchval=None, uniq=True):
""":func:`burpui.misc.auth.ldap.LdapLoader.fetch` searches for a user
object in the LDAP server.
:param searchval: attribute value to search for
:type searchval: str
:param uniq: only return one result
:type uniq: bool
:returns: dictionary of `distinguishedName` and `commonName` attributes for the
user if found, otherwise None.
"""
try:
if self.filt:
query = self.filt.format(self.attr, searchval)
else:
query = "({0}={1})".format(self.attr, searchval)
self.logger.info("filter: {0} | base: {1}".format(query, self.base))
r = None
with self.ldap:
self.logger.debug("LDAP Connection = {0}".format(str(self.ldap)))
self.ldap.search(self.base, query, attributes=["cn", self.attr])
r = self.ldap.response
if not r:
raise ValueError("no results")
except Exception as e:
self.logger.error("Ooops, LDAP lookup failed: {0}".format(str(e)))
return None
if not uniq:
return r
for record in r:
attrs = record["attributes"]
if self.attr in attrs and searchval in attrs[self.attr]:
self.logger.info("Found DN: {0}".format(record["dn"]))
return {"dn": record["dn"], "cn": attrs["cn"][0]}
def _prefetch(self):
"""Prefetch all users that match the filter/base"""
self.users = []
results = self.fetch("*", False) or []
for record in results:
attrs = record["attributes"]
if self.attr in attrs:
self.users.append(attrs[self.attr][0])
self.logger.debug(self.users)
def check(self, dn=None, passwd=None):
""":func:`burpui.misc.auth.ldap.LdapLoader.check` authenticates a user
against the LDAP server.
:param dn: canonical `dn` of the user to authenticate as
:type dn: str
:param passwd: password of the user to authenticate as
:type passwd: str
:returns: True if bind was successful, otherwise False
"""
try:
with Connection(
self.server,
user="{0}".format(dn),
password=passwd,
raise_exceptions=True,
auto_bind=self.auto_bind,
authentication=SIMPLE,
) as con:
self.logger.debug("LDAP Connection = {0}".format(str(con)))
self.logger.info("Bound as user: {0}".format(dn))
return con.bind()
except Exception as e:
self.logger.error(
"Failed to authenticate user: {0}, {1}".format(dn, str(e))
)
self.logger.error("Bind as '{0}' failed".format(dn))
return False
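# Filter illustration (hypothetical config): with searchattr = uid and
# filter = "(&(objectClass=person)({0}={1}))", fetch('jdoe') searches the
# configured base with "(&(objectClass=person)(uid=jdoe))".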
class UserHandler(BUIhandler):
__doc__ = __(
"Connects to a LDAP database to authenticate users. Handles "
"searching for and binding as."
)
priority = 50
preload_users = False
"""The :class:`burpui.misc.auth.ldap.UserHandler` class maintains a list of
``Burp-UI`` users.
"""
def __init__(self, app=None):
""":func:`burpui.misc.auth.ldap.UserHandler.__init__` creates the
handler instance
:param app: Instance of the app we are running in
:type app: :class:`burpui.engines.server.BUIServer`
"""
self.ldap = LdapLoader(app, self)
self.users = {}
def user(self, name=None):
"""See :func:`burpui.misc.auth.interface.BUIhandler.user`"""
if name not in self.users:
self.users[name] = LdapUser(self.ldap, name)
ret = self.users[name]
if not ret.active:
return AnonymousUserMixin()
return ret<|fim▁hole|> return self.ldap
class LdapUser(BUIuser):
"""The :class:`burpui.misc.auth.ldap.LdapUser` class generates a ``Burp-UI``
user from a user object found in the LDAP server.
"""
def __init__(self, ldap=None, name=None):
""":func:`burpui.misc.auth.ldap.LdapUser.__init__` function finds a user
in the LDAP server and stores the DN of the user if found.
:param ldap: an ``LdapLoader`` instance
:type ldap: :class:`burpui.misc.auth.ldap.LdapLoader`
:param name: login name of the user to find in the LDAP server
:param type: str
"""
self.active = False
self.authenticated = False
self.ldap = ldap
self.name = name
self.backend = self.ldap.name
found = self.ldap.fetch(name)
if found:
self.id = found["dn"]
self.active = True
def login(self, passwd=None):
""":func:`burpui.misc.auth.ldap.LdapUser.login` function finds a user in
the LDAP server and authenticates that user using an LDAP bind.
:param passwd: password to bind to the LDAP server with
:type passwd: str
:returns: True if found and bind was successful;
False if found but bind failed;
otherwise de-activates the user and returns False
"""
if self.ldap.fetch(self.name):
self.authenticated = self.ldap.check(self.id, passwd)
return self.authenticated
else:
self.authenticated = False
self.active = False
return False
def get_id(self):
""":func:`burpui.misc.auth.ldap.LdapUser.get_id` function
:returns: login name of the user
"""
return self.name<|fim▁end|> |
@property
def loader(self): |