Columns:
function: string (lengths 11 to 56k)
repo_name: string (lengths 5 to 60)
features: sequence
def _get_text_amount(self, amount, currency_id):
    es_regex = re.compile('es.*')
    en_regex = re.compile('en.*')
    lang = self.localcontext.get('lang')
    if es_regex.match(lang):
        from openerp.addons.l10n_cr_amount_to_text import amount_to_text
        return amount_to_text.number_to_text_es(
            amount, '', join_dec=' Y ', separator=',', decimal_point='.')
    elif en_regex.match(lang):
        from openerp.tools import amount_to_text_en
        return amount_to_text_en.amount_to_text(amount, lang='en', currency='')
    else:
        raise Warning(_('Language not supported by this report.'))
ClearCorp/odoo-clearcorp
[ 14, 44, 14, 5, 1400190481 ]
def test_anonymous_user_project_view(self):
    self.open("")
    self.wd.wait_for_css(".organizations")
    self.wd.find_element_by_link_text("Organizations").click()
    self.wd.wait_for_xpath("//h1[contains(text(), 'Organizations')]")
    self.wd.find_element_by_link_text(
        Organization.get_test_org_name()).click()
    self.wd.wait_for_xpath(
        "//h2[contains(text(), 'Organization Overview')]")
    assert self.wd.find_element_by_xpath("//div/section/p")
Cadasta/cadasta-test
[ 1, 7, 1, 18, 1481171870 ]
def test_merge_users_view(self):
    user1 = create_user(get_student_dict(1))
    user2 = create_user(get_student_dict(2))
    response = self.c.get('/mergeusers/{0}/{1}/'.format(user1.pk, user2.pk))
    self.assertEqual(response.status_code, 200)
troeger/opensubmit
[ 30, 18, 30, 45, 1411388038 ]
def test_test_machine_list_view(self):
    # one machine given
    create_test_machine('127.0.0.1')
    response = self.c.get('/teacher/opensubmit/testmachine/')
    self.assertEqual(response.status_code, 200)
troeger/opensubmit
[ 30, 18, 30, 45, 1411388038 ]
def _compute_template_task_id(self):
    for rec in self:
        rec.template_task_id = self.env['project.task']
ingadhoc/project
[ 8, 42, 8, 7, 1453129449 ]
def onchange_template(self):
    if not self.template_task_id:
        return
    copy_data_default = {
        'project_id': self.project_id.id,
        'partner_id': self.partner_id.id,
        'company_id': self.company_id.id,
        'child_ids': False,
    }
    data = self.template_task_id.copy_data(copy_data_default)
    for k, v in data[0].items():
        self[k] = v
    childs = []
    copy_data_default['project_id'] = self.subtask_project_id.id
    for child in self.template_task_id.child_ids:
        childs.append((0, 0, child.copy_data(copy_data_default)[0]))
    self.child_ids = childs
ingadhoc/project
[ 8, 42, 8, 7, 1453129449 ]
def _get_photo(self, cr, uid, context=None):
    photo_path = addons.get_module_resource('res_users_kanban', 'static/src/img', 'default_image.png')
    return open(photo_path, 'rb').read().encode('base64')
iw3hxn/LibrERP
[ 29, 16, 29, 1, 1402418161 ]
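The helper above relies on Python 2's str.encode('base64'), which no longer exists in Python 3. As a hedged aside, a rough Python 3 equivalent of the same read-and-encode step might look like this (read_image_b64 and its path argument are illustrative names, not part of the dataset row):

import base64

def read_image_b64(path):
    # Python 3 replacement for open(path, 'rb').read().encode('base64')
    with open(path, 'rb') as handle:
        return base64.b64encode(handle.read()).decode('ascii')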
def _set_image(self, cr, uid, id, name, value, args, context=None): return self.write(cr, uid, [id], {'image': tools.image_resize_image_big(value)}, context=context)
iw3hxn/LibrERP
[ 29, 16, 29, 1, 1402418161 ]
def _get_default_image(self, cr, uid, context=None):
    image_path = addons.get_module_resource('res_users_kanban', 'static/src/img', 'default_image.png')
    return tools.image_resize_image_big(open(image_path, 'rb').read().encode('base64'))
iw3hxn/LibrERP
[ 29, 16, 29, 1, 1402418161 ]
def AB06_BA08(C, vs30, imt, PGA760):
    F = np.zeros_like(vs30)
    F[vs30 >= 760.] = 10**np.interp(np.log10(vs30[vs30 >= 760.]),
                                    np.log10([760.0, 2000.0]),
                                    np.log10([1.0, C['c']]))
    F[vs30 >= 760.] = 1./F[vs30 >= 760.]
    C2 = BA08.COEFFS_SOIL_RESPONSE[imt]
    nl = _get_site_amplification_non_linear(vs30[vs30 < 760.], PGA760[vs30 < 760.], C2)
    lin = _get_site_amplification_linear(vs30[vs30 < 760.], C2)
    F[vs30 < 760.] = np.exp(nl+lin)
    return F
gem/oq-engine
[ 291, 241, 291, 48, 1277737182 ]
def __init__(self, **kwargs):
    # kwargs must contain the keys REQUIRES_DISTANCES,
    # DEFINED_FOR_TECTONIC_REGION_TYPE, gmpe_table
    fname = kwargs['gmpe_table']
    if isinstance(fname, io.BytesIO):
        # magic happening in the engine when reading the gsim from HDF5
        pass
    else:
        # fname is really a filename (absolute in the engine)
        kwargs['gmpe_table'] = os.path.join(
            BASE_PATH_AA13, os.path.basename(fname))
    super().__init__(**kwargs)
    self.REQUIRES_DISTANCES = frozenset(kwargs['REQUIRES_DISTANCES'])
    self.DEFINED_FOR_TECTONIC_REGION_TYPE = kwargs['DEFINED_FOR_TECTONIC_REGION_TYPE']
gem/oq-engine
[ 291, 241, 291, 48, 1277737182 ]
def _make_rabbitmq_connection(url):
    parse_result = urlparse(url)

    # Parse host & user/password
    try:
        (authdata, host) = parse_result.netloc.split("@")
    except Exception as e:
        raise RuntimeError("Invalid url") from e

    try:
        (user, password) = authdata.split(":")
    except Exception:
        (user, password) = ("guest", "guest")

    vhost = parse_result.path
    return AmqpConnection(host=host, userid=user, password=password, virtual_host=vhost[1:])
taigaio/taiga-back
[ 5787, 1103, 5787, 191, 1363706040 ]
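For context on the URL shape the helper above expects, here is a simplified, standalone sketch of the same parsing idea (parse_amqp_url and the example URL are hypothetical; the original raises RuntimeError when no "@" is present and returns an AmqpConnection instead of a tuple):

from urllib.parse import urlparse

def parse_amqp_url(url):
    # Split amqp://user:password@host:port/vhost into its pieces,
    # defaulting to guest/guest when no credentials are given.
    parsed = urlparse(url)
    authdata, _, host = parsed.netloc.rpartition("@")
    if ":" in authdata:
        user, password = authdata.split(":", 1)
    else:
        user, password = "guest", "guest"
    return host, user, password, parsed.path[1:]

print(parse_amqp_url("amqp://taiga:secret@localhost:5672/taiga"))
# ('localhost:5672', 'taiga', 'secret', 'taiga')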
def __init__(self, url): self.url = url
taigaio/taiga-back
[ 5787, 1103, 5787, 191, 1363706040 ]
def _unique_date_range(self):
    for record in self:
        record._unique_date_range_one()
OCA/l10n-spain
[ 180, 456, 180, 97, 1403004087 ]
def set_resolution(self, curvename, value): """ Set resolution for tooth form representation INPUT parameters: curvename : segment of tooth flank (string) one of the following: flank, fillet, tip, root, shaft, width value : new value for number of points to represent segment """ if curvename == 'flank': self.points_flank = value elif curvename == 'fillet': self.points_fillet = value elif curvename == 'tip': self.points_tip = value elif curvename == 'root': self.points_root = value elif curvename == 'shaft': self.points_shaft = value elif curvename == 'width': self.points_width = value
efirvida/python-gearbox
[ 17, 12, 17, 1, 1429804194 ]
def _make_unique(self, coords): """ Remove redundant entries from coordinate array INPUT parameter: coords : list of 2d-coordinate points (TColgp_Array1OfPnt2d, pythonOCC) OUTPUT: unique_coords : list of unique coordinates (TColgp_Array1OfPnt2d, pythonOCC) """ # tolerance for comparisons index = None tol = self._tol_default * self.data.get('m_n') # upper and lower index of point-array upper_index = len(coords) lower_index = 0 # remove redundant entries uniques = list() for index in range(lower_index, upper_index): unique = True for unique_point in uniques: if abs(coords[index][0] - unique_point[0]) < tol and abs(coords[index][1] - unique_point[1]) < tol: unique = False if unique: uniques.append([coords[index][0], coords[index][1]]) # copy list entries into coordinate array length_uniques = len(uniques) unique_coords = {} for index in range(lower_index, lower_index + length_uniques): if abs(uniques[index - 1][0]) > tol: unique_x = uniques[index - 1][0] else: unique_x = 0.0 if abs(uniques[index - 1][1]) > tol: unique_y = uniques[index - 1][1] else: unique_y = 0.0 if unique_x and unique_y: unique_coords.update({index: [unique_x, unique_y]}) unique_coords.update({index + 1: [0.0, self.data['d_a'] / 2]}) return unique_coords
efirvida/python-gearbox
[ 17, 12, 17, 1, 1429804194 ]
def __init__(self, geardata, flankmods=None):
    """
    Initialization of GearWheel-object
    Should be overwritten in derived classes

    INPUT parameter:
    geardata   : data of gear wheel (dictionary)
    flankmods  : data of flank modifications (dictionary)
    formcoords : list of 2d-coordinate points (list, list(len=2), numeric)
    """
    self.points_shaft = None
    self.data = deepcopy(geardata)
    self.modifications = deepcopy(flankmods)
efirvida/python-gearbox
[ 17, 12, 17, 1, 1429804194 ]
def set_gear_data(self, geardata): """ Set data-attribute of class, overwrite current value INPUT parameter: geardata : dictionary, containing geometric data of gear for content, see method __init__ """ self.__init__(geardata, self.modifications)
efirvida/python-gearbox
[ 17, 12, 17, 1, 1429804194 ]
def get_flank_modifications(self): """ Return modifications-attribute of class OUTPUT: data attribute of class (dictionary) """ return self.modifications
efirvida/python-gearbox
[ 17, 12, 17, 1, 1429804194 ]
def update_flank_modifications(self, flankmods): """ Set modifications-attribute of class, update current value INPUT parameter: flankmods : dictionary, containing flank modification data of gear for content, see method __init__ """ tempmods = self.modifications.copy() tempmods.update(flankmods) self.__init__(self.data, tempmods)
efirvida/python-gearbox
[ 17, 12, 17, 1, 1429804194 ]
def _tooth_thickness(self, d_y): """ Tooth thickness in transverse cross-section (chord-length) INPUT parameter: d_y : two times coordinate of tooth flank in radial direction (diameter of y-cylinder) OUTPUT: s_y : chord length of tooth thickness at d_y (numeric) d_yc : cutting point of diameter through tooth center and chord (numeric) """ # necessary due to numerical rounding errors if self.data.get('d') / d_y * cos(radians(self.data.get('alpha_t'))) > 1.0: alpha_yt = 0.0 else: alpha_yt = degrees(acos(self.data.get('d') / d_y * cos(radians(self.data.get('alpha_t'))))) s_yt = d_y * ( (pi + 4 * self.data.get('x_E') * tan(radians(self.data.get('alpha_n')))) / 2 / self.data.get( 'z') + inv(self.data.get('alpha_t')) - inv(alpha_yt)) s_y = d_y * (sin(s_yt / d_y)) # tooth thickness (chord-length) d_yc = d_y * (cos(s_yt / d_y)) # diameter at center of tooth (cut with chord) return s_y, d_yc
efirvida/python-gearbox
[ 17, 12, 17, 1, 1429804194 ]
def __init__(self, geardata, flankmods=None, formcoords=None): """ Initialization of GearWheel-object. All parameters in accordance to DIN 3960 and DIN 3967. INPUT parameters: z : number of teeth (numeric, integer) m_n : normal module (numeric, positive) d : pitch diameter (numeric) two of the three parameters z, m_n, d, must be supplied b : tooth width (numeric, positive) d_f : root circle diameter (numeric) optional - calculated if not supplied d_a : tip diameter (numeric) optional - calculated if not supplied d_Ff : root form diameter (numeric) optional - will be estimated if not supplied d_Fa : tip form diameter (numeric) optional - set equal da if not supplied (no chamfer) rho_f : fillet radius (numeric) optional - set equal 0.38*mn if not supplied x : addendum modification factor (numeric) optional - set equal 0.0 if not supplied alpha_n : pressure angle (numeric, positive)[degrees] optional - set equal 20.0 if not supplied beta : helix angle (numeric)[degrees] optional - set equal 0.0 if not supplied a : addendum (numeric) optional - no estimation c : tip clearance (numeric, positive, 0.1...0.3*mn) optional - set equal 0.25*mn if not supplied alpha_wt : service pressure angle (numeric, positive)[degrees] optional - calculated from z_2 or d_w d_w : service pitch diameter (numeric) optional - calculated from alpha_wt or z_2 h_k : radial size of tip chamfer (numeric) optional - set equal d_a-d_Fa or 0.0 if not supplied s_aK : remaining tooth thickness at tip, chord-length (numeric) optional - set equal s_a-2*h_k if not supplied z_2 : number of teeth of counter gear (numeric, integer) optional - calculated from alpha_wt or d_w d_s : shaft diameter, inner gear wheel diameter (numeric) optional - set equal 0.0 if not supplied A_s : tooth thickness allowance in normal cross-section (numeric, negative) optional - set equal 0.0 if not supplied All input parameters above are arranged in a dictionary. The keys are the names of the parameters as listed above. formcoords : 2D cartesian coordinates of points on the toothflank, describing a half tooth (TColgp_Array1OfPnt2d, pythonOCC) There are several possibilities for defining a complete gearwheel: 1) z, m_n, b, (beta), formcoords 2) z, m_n, b, (beta), d_f, d_a, d_Ff, d_Fa, rho_f 3) z, m_n, b, (beta), alpha_n, alpha_wt, x, a, rho_f 4) z, m_n, b, (beta), alpha_n, z_2, x, a, rho_f Some parameters can be left out, but the result might differ from your real gear. Missing parameters are estimated if possible. The helix angle beta doesn't have to be supplied for a spur gear. The constructor does not check for unit consistency. The user is responsible for supplying all values with consistent units. 
""" super(CylindricalGearWheel, self).__init__(geardata) self.data = deepcopy(geardata) self.modifications = deepcopy(flankmods) # form coordinates: value check (at least two points for defining a # tooth form (straight flanks) and two coordinates per point) if formcoords: self.data.update(self._analyze_formcoords()) # module: value check if 'm_n' in self.data and not self.data.get('m_n') >= 0: raise ValueError('module non-positive') if 'beta' not in self.data: self.data.update({'beta': self._beta_default}) self.data.update({'alpha_t': degrees( atan(tan(radians(self.data.get('alpha_n'))) / cos(radians(self.data.get('beta')))))}) self.data.update({'s_p': (pi * self.data['m_n'] / 2) + 2 * self.data['m_n'] * self.data['x'] * tan( radians(self.data['alpha_n']))}) if 'tau' in self.data and 'z' not in self.data: self.data.update({'z': int(2 * pi / self.data.get('tau'))}) if 'z' in self.data and 'm_n' in self.data: self.data.update( {'d': self.data.get('m_n') * self.data.get('z') / cos(radians(self.data.get('beta')))}) elif 'z' in self.data and 'd' in self.data: self.data.update( {'m_n': self.data.get('d') * cos(radians(self.data.get('beta'))) / self.data.get('z')}) elif 'm_n' in self.data and 'd' in self.data: self.data.update({ 'z': int(self.data.get('d') * cos(radians(self.data.get('beta'))) / self.data.get('m_n'))}) else: raise AttributeError('insufficient data supplied') if 'tau' not in self.data: self.data.update({'tau': degrees(2 * pi / self.data.get('z'))}) isexternal = sign(self.data.get('z')) if not sign(self.data.get('d')) == isexternal: raise ValueError('sign of pitch diameter') self.data.update({'m_t': self.data.get('m_n') / cos(radians(self.data.get('beta')))}) if 'alpha_n' in self.data: if self.data.get('alpha_n') < 0: raise ValueError('pitch angle non-positive') else: self.data.update({'alpha_n': self._alpha_n_default}) if 'x' not in self.data: self.data.update({'x': self._x_default}) if 'A_s' not in self.data: self.data.update({'A_s': self._A_s_default}) # tooth thickness allowance: value check else: if not self.data.get('A_s') <= 0: raise ValueError('tooth thickness allowance positive') self.data.update({'x_E': self.data.get('x') + self.data.get('A_s') / 2 / tan( radians(self.data.get('alpha_n'))) / self.data.get('m_n')}) if 'd_w' in self.data and 'alpha_wt' not in self.data: if not sign(self.data.get('d_w')) == isexternal: raise ValueError('sign of service pitch diameter') self.data.update({'alpha_wt': degrees(acos( self.data.get('d') / self.data.get('d_w') * cos(radians(self.data.get('alpha_t')))))}) if 'alpha_wt' in self.data and 'd_w' not in self.data: self.data.update({'d_w': self.data.get('d') * cos(radians(self.data.get('alpha_t'))) / cos( radians(self.data.get('alpha_wt')))}) self.data.update({'d_b': self.data.get('d') * cos(radians(self.data.get('alpha_t')))}) if formcoords: self.data.update(self._analyze_formcoords()) if not formcoords: # tip clearance: value check, set to default if not supplied if 'c' in self.data: if self.data.get('c') < 0.1 * self.data.get('m_n') or self.data.get('c') > 0.3 * self.data.get( 'm_n'): raise ValueError('tip clearance out of bounds') else: self.data.update({'c': self._c_default * self.data.get('m_n')}) # fillet radius: value check, set to default if not supplied if 'rho_f' not in self.data: self.data.update({'rho_f': self._rho_f_default * self.data.get('m_n')}) else: if self.data.get('rho_f') < 0: raise ValueError('fillet radius negative') # CAUTION: THE FOLLOWING SECTION OF CODE WILL BE REMOVED IN FUTURE RELEASES! 
# tool fillet radius: value check if 'rho_fP' in self.data: if self.data.get('rho_fP') < 0: raise ValueError('tool fillet radius negative') if not self.data.get('beta') == 0: raise ValueError('fillet trochoid cannot be generated for helical gears') # END OF CODE SECTION TO BE REMOVED # calculate tip height modification factor if possible (else set to default) # various attempts are made if 'a' in self.data and 'k' not in self.data: self.data.update( {'a_d': self.data.get('m_t') * (self.data.get('z') + self.data.get('z_2')) / 2}) self.data.update({'k': (self.data.get('a') - self.data.get('a_d')) / self.data.get('m_n') - ( self.data.get('x') + self.data.get('x_2'))}) else: self.data.update({'k': self._k_default}) # root circle diameter: value check, calculate if not supplied if 'd_f' in self.data: if 'd_f' in self.data > 'd' in self.data: raise ValueError('root circle diameter greater than pitch diameter') if not sign(self.data.get('d_f')) == isexternal: raise ValueError('sign of root circle diameter') else: self.data.update({ 'd_f': self.data.get('d') + 2 * self.data.get('x_E') * self.data.get('m_n') - 2 * ( self.data.get('m_n') + self.data.get('c'))}) # tip diameter: value check, calculate if not supplied if 'd_a' in self.data: # if self.data.get('d_a')<self.data.get('d'): # raise ValueError, 'tip diameter less than pitch diameter' if not sign(self.data.get('d_a')) == isexternal: raise ValueError('sign of tip diameter') else: self.data.update({ 'd_a': self.data.get('d') + 2 * self.data.get('x') * self.data.get('m_n') + 2 * self.data.get( 'm_n') + 2 * self.data.get('k') * self.data.get('m_n')}) # radial value of tip chamfer: value check, calculate or set to default # if not supplied if 'h_k' in self.data: if self.data.get('h_k') < 0: raise ValueError('value of tip chamfer negative') elif 'd_Fa' in self.data: self.data.update({'h_k': abs(self.data.get('d_a') - self.data.get('d_Fa')) / 2}) else: self.data.update({'h_k': self._h_k_default}) # remaining tooth thickness: value check, set to default if not supplied s_a, d_ac = self._tooth_thickness(self.data.get('d_a')) if 's_aK' not in self.data: self.data.update({'s_aK': s_a - 2 * self.data.get('h_k')}) if self.data.get('s_aK') < 0: raise ValueError('remaining tooth thickness at tip negative') if self.data.get('s_aK') > s_a: raise ValueError('remaining tip tooth thickness greater than tooth thickness') if 'd_Ff' in self.data: if self.data.get('d_Ff') > self.data.get('d'): raise ValueError('root form diameter greater than pitch diameter') if self.data.get('d_Ff') < self.data.get('d_f'): raise ValueError('root form diameter less than root circle diameter') if not sign(self.data.get('d_Ff')) == isexternal: raise ValueError('sign of root form diameter') # tip form diameter: value check if 'd_Fa' in self.data: if self.data.get('d_Fa') < self.data.get('d'): raise ValueError('tip form diameter less than pitch diameter') if self.data.get('d_Fa') > self.data.get('d_a'): raise ValueError('tip form diameter greater than tip diameter') if not sign(self.data.get('d_Fa')) == isexternal: raise ValueError('sign of tip form diameter') else: self.data.update({'d_Fa': self.data.get('d_a') - 2 * self.data.get('h_k')}) if 'd_s' not in self.data: self.data.update({'d_s': self._d_s_default}) if abs(self.data.get('d_s')) > self._tol_default: if not sign(self.data.get('d_s')) == isexternal: raise ValueError('sign of shaft diameter') if not self.data.get('d_s') < self.data.get('d_f'): raise ValueError('shaft diameter greater than root circle diameter') if not 
self.formcoords: self._make_form_coords() else: self.formcoords = self._make_unique(self.formcoords)
efirvida/python-gearbox
[ 17, 12, 17, 1, 1429804194 ]
def root_circle_center_func(phi): return fil_end_point + self.data.get('rho_f') * np.array([sin(phi[0]), cos(phi[0])]) - (self.data.get( 'd_f') / 2 + self.data.get('rho_f')) * np.array([sin(phi[1]), cos(phi[1])])
efirvida/python-gearbox
[ 17, 12, 17, 1, 1429804194 ]
def __init__(self, pairdata): """ Initialization of GearPair-object Should be overwritten in derived classes :rtype : object INPUT parameters: pairdata : data of gear wheel pair (dictionary) Pinion : pinion (GearWheel-instance) Gear : gear (GearWheel-instance) """ self.data = deepcopy(pairdata) gear = {'z': self.data['z'], 'x': self.data['x'], 'alpha_n': self.data['alpha_n'], 'beta': self.data['beta'], 'm_n': self.data['m_n'], 'rho_f': self.data['rho_f'], 'd_s': self.data['d_s'], 'c': self.data['c'], 'b': self.data['b']} self.gear = self.__set_gear(gear)
efirvida/python-gearbox
[ 17, 12, 17, 1, 1429804194 ]
def __str__(self): return "Id2"
choderalab/openpathsampling
[ 87, 48, 87, 102, 1378303588 ]
def test_empty_volume(self): """Empty volume is well-behaved""" empty = volume.EmptyVolume() test = 0.1 assert_equal(empty(test), False) assert_equal((empty & volA)(test), False) assert_equal((volA & empty)(test), False) assert_equal((empty | volA)(test), True) assert_equal((volA | empty)(test), True) assert_equal((empty & volA).__str__(), "empty") # assert_is: logical combos with empty should return same obj assert_is((empty - volA), empty) assert_is((volA - empty), volA) assert_is((volA | empty), volA) assert_is((empty | volA), volA) assert_is((volA & empty), empty) assert_is((empty & volA), empty) assert_is((empty ^ volA), volA) assert_is((volA ^ empty), volA) assert_equal((~ empty).__str__(), "all")
choderalab/openpathsampling
[ 87, 48, 87, 102, 1378303588 ]
def test_full_volume(self): """Full volume is well-behaved""" full = volume.FullVolume() test = 0.1 assert_equal(full(test), True) assert_equal((full & volA)(test), True) assert_equal((volA & full)(test), True) assert_equal((full | volA)(test), True) assert_equal((volA | full)(test), True) # assert_is: logical combos with full should return same obj assert_is((full & volA), volA) assert_is((volA & full), volA) assert_is((full | volA), full) assert_is((volA | full), full) assert_equal((volA - full), volume.EmptyVolume()) assert_equal((full - volA), ~ volA) assert_equal((full ^ volA), ~ volA) assert_equal((volA ^ full), ~ volA) assert_equal((volA | full).__str__(), "all") assert_equal((~ full).__str__(), "empty")
choderalab/openpathsampling
[ 87, 48, 87, 102, 1378303588 ]
def test_upper_boundary(self):
    assert_equal(volA(0.49), True)
    assert_equal(volA(0.50), False)
    assert_equal(volA(0.51), False)
choderalab/openpathsampling
[ 87, 48, 87, 102, 1378303588 ]
def test_negation(self):
    assert_equal((~volA)(0.25), False)
    assert_equal((~volA)(0.75), True)
    assert_equal((~volA)(0.5), True)
    assert_equal((~volA)(-0.5), False)
choderalab/openpathsampling
[ 87, 48, 87, 102, 1378303588 ]
def test_and_combinations(self): assert_equal((volA & volB), volume.CVDefinedVolume(op_id, 0.25, 0.5)) assert_equal((volA & volB)(0.45), True) assert_equal((volA & volB)(0.55), False) assert_equal((volB & volC), volume.EmptyVolume()) # go to VolumeCombination if order parameter isn't the same assert_equal((volA & volA2), volume.VolumeCombination(volA, volA2, lambda a, b: a and b, '{0} and {1}') )
choderalab/openpathsampling
[ 87, 48, 87, 102, 1378303588 ]
def test_xor_combinations(self): assert_equal((volA ^ volB), volume.UnionVolume( volume.CVDefinedVolume(op_id, -0.5, 0.25), volume.CVDefinedVolume(op_id, 0.5, 0.75) )) assert_equal((volA ^ volA2), volume.SymmetricDifferenceVolume(volA, volA2))
choderalab/openpathsampling
[ 87, 48, 87, 102, 1378303588 ]
def test_str(self): assert_equal(volA.__str__(), "{x|Id(x) in [-0.5, 0.5]}") assert_equal((~volA).__str__(), "(not {x|Id(x) in [-0.5, 0.5]})")
choderalab/openpathsampling
[ 87, 48, 87, 102, 1378303588 ]
def _vol_for_cv_type(inp): if not HAS_SIMTK_UNIT and inp == 'simtk': pytest.skip() func = { 'float': lambda s: 1.0, 'array': lambda s: np.array([1.0, 2.0]), 'array1': lambda s: np.array([1.0]), 'simtk': None }[inp] if func is None: # only if inp is 'simtk' func = lambda s: 1.0 * unit.nanometers cv = paths.FunctionCV('cv', func) volume = paths.CVDefinedVolume(cv, 0.0, 1.5) return volume
choderalab/openpathsampling
[ 87, 48, 87, 102, 1378303588 ]
def test_is_iterable(self, inp): snap = make_1d_traj([0.0])[0] volume = self._vol_for_cv_type(inp) val = volume.collectivevariable(snap) expected = inp in ['array', 'array1'] if expected: with pytest.warns(UserWarning, match="returns an iterable"): result = volume._is_iterable(val) else: result = volume._is_iterable(val) assert result is expected
choderalab/openpathsampling
[ 87, 48, 87, 102, 1378303588 ]
def test_get_cv_float(self, inp): snap = make_1d_traj([0.0])[0] volume = self._vol_for_cv_type(inp) val = volume._get_cv_float(snap) expected = inp in ['float', 'array1'] assert isinstance(val, float) is expected
choderalab/openpathsampling
[ 87, 48, 87, 102, 1378303588 ]
def setup(self):
    self.pvolA = volume.PeriodicCVDefinedVolume(op_id, -100, 75)
    self.pvolA_ = volume.PeriodicCVDefinedVolume(op_id, 75, -100)
    self.pvolB = volume.PeriodicCVDefinedVolume(op_id, 50, 100)
    self.pvolC = volume.PeriodicCVDefinedVolume(op_id, -100, -50)
    self.pvolD = volume.PeriodicCVDefinedVolume(op_id, -100, 100)
    self.pvolE = volume.PeriodicCVDefinedVolume(op_id, -150, 150)
choderalab/openpathsampling
[ 87, 48, 87, 102, 1378303588 ]
def test_normal_implicit(self): """min<max, no periodic domain defined""" lambda_min = -150 lambda_max = 70 vol = volume.PeriodicCVDefinedVolume(op_id, lambda_min, lambda_max) assert_equal(vol.__str__(), "{x|Id(x) [periodic] in [-150, 70]}") # out of state assert_equal(False, vol(lambda_min-1.0)) assert_equal(False, vol(lambda_max+1.0)) # in state assert_equal(True, vol(lambda_min+1.0)) assert_equal(True, vol(lambda_max-1.0)) # border assert_equal(True, vol(lambda_min)) assert_equal(False, vol(lambda_max))
choderalab/openpathsampling
[ 87, 48, 87, 102, 1378303588 ]
def test_wrapped_volume_implicit(self): """max<min, no periodic domain defined""" lambda_min = 70 lambda_max = -150 vol = volume.PeriodicCVDefinedVolume(op_id, lambda_min, lambda_max) # out of state assert_equal(False, vol(lambda_min-1.0)) assert_equal(False, vol(lambda_max+1.0)) # in state assert_equal(True, vol(lambda_min+1.0)) assert_equal(True, vol(lambda_max-1.0)) # border assert_equal(True, vol(lambda_min)) assert_equal(False, vol(lambda_max))
choderalab/openpathsampling
[ 87, 48, 87, 102, 1378303588 ]
def test_volume_bigger_than_bounds(self):
    '''max-min > pbc_range raises Exception'''
    vol = volume.PeriodicCVDefinedVolume(op_id, 90, 720, -180, 180)
choderalab/openpathsampling
[ 87, 48, 87, 102, 1378303588 ]
def test_periodic_and_combos(self): assert_equal((self.pvolA & self.pvolB), volume.PeriodicCVDefinedVolume(op_id, 50, 75)) assert_equal((self.pvolA & self.pvolB)(60), True) assert_equal((self.pvolA & self.pvolB)(80), False) assert_equal((self.pvolB & self.pvolC), volume.EmptyVolume()) assert_equal((self.pvolC & self.pvolB), volume.EmptyVolume()) assert_is((self.pvolA & self.pvolA), self.pvolA) assert_equal((self.pvolA & self.pvolA_), volume.EmptyVolume()) assert_equal((self.pvolE & self.pvolD), self.pvolD) # go to special case for cyclic permutation assert_equal((self.pvolB & self.pvolD), self.pvolB) # go to special case assert_equal((self.pvolE & self.pvolA_), volume.UnionVolume( volume.PeriodicCVDefinedVolume(op_id, -150,-100), volume.PeriodicCVDefinedVolume(op_id, 75, 150) ) ) # go to super if needed assert_equal(type(self.pvolA & volA), volume.IntersectionVolume)
choderalab/openpathsampling
[ 87, 48, 87, 102, 1378303588 ]
def test_periodic_xor_combos(self): assert_equal(self.pvolA ^ self.pvolA_, volume.FullVolume()) assert_equal(self.pvolA ^ self.pvolA, volume.EmptyVolume()) assert_equal(self.pvolE ^ self.pvolD, volume.UnionVolume( volume.PeriodicCVDefinedVolume(op_id, -150, -100), volume.PeriodicCVDefinedVolume(op_id, 100, 150))) assert_equal(self.pvolB ^ self.pvolC, self.pvolB | self.pvolC) assert_equal(self.pvolB ^ self.pvolD, volume.PeriodicCVDefinedVolume(op_id, -100, 50))
choderalab/openpathsampling
[ 87, 48, 87, 102, 1378303588 ]
def test_periodic_sub_combos(self): assert_equal(self.pvolA - self.pvolA_, self.pvolA) assert_equal(self.pvolA_ - self.pvolA, self.pvolA_) assert_equal(self.pvolD - self.pvolA, volume.PeriodicCVDefinedVolume(op_id, 75, 100)) assert_equal((self.pvolD - self.pvolA)(80), True) assert_equal((self.pvolD - self.pvolA)(50), False) assert_equal(self.pvolB - self.pvolC, self.pvolB) assert_equal(self.pvolA - self.pvolA, volume.EmptyVolume()) assert_equal(self.pvolE - self.pvolD, volume.UnionVolume( volume.PeriodicCVDefinedVolume(op_id, -150, -100), volume.PeriodicCVDefinedVolume(op_id, 100, 150))) assert_equal(self.pvolE - self.pvolA_, volume.PeriodicCVDefinedVolume(op_id, -100, 75))
choderalab/openpathsampling
[ 87, 48, 87, 102, 1378303588 ]
def timestamp():
    if on_windows:
        rv = "%.6f" % (
            (datetime.datetime.utcnow() - datetime.datetime(1970, 1, 1)).total_seconds(),)
    else:
        rv = datetime.datetime.now().strftime("%s.%f")
    return rv
01org/fMBT
[ 125, 72, 125, 3, 1340117584 ]
def code2string(code): return "\n".join( ["%-4s %s" % (li+1, l) for li, l in enumerate(code.splitlines())])
01org/fMBT
[ 125, 72, 125, 3, 1340117584 ]
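As a quick illustration of what code2string above produces (the two-line snippet passed to it is an arbitrary example, not from the source):

def code2string(code):
    return "\n".join(
        ["%-4s %s" % (li+1, l) for li, l in enumerate(code.splitlines())])

print(code2string("a = 1\nb = a + 1"))
# 1    a = 1
# 2    b = a + 1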
def _store_return_value(func, queue):
    while True:
        queue.put(func())
01org/fMBT
[ 125, 72, 125, 3, 1340117584 ]
def __init__(self, ns):
    self.ns = ns
    self._on_disconnect = []
    self._on_drop = []
01org/fMBT
[ 125, 72, 125, 3, 1340117584 ]
def local_nss(self): """List local namespaces """ return _g_local_namespaces.keys()
01org/fMBT
[ 125, 72, 125, 3, 1340117584 ]
def on_disconnect(self): """Return codes that will be executed when a client has disconnected. """ return self._on_disconnect
01org/fMBT
[ 125, 72, 125, 3, 1340117584 ]
def exec_on_disconnect(self, code, any_connection=False):
    """Add code that will be executed when client has disconnected.
    """
    if not any_connection:
        conn_id = _g_executing_pythonshare_conn_id
    else:
        conn_id = None
    self._on_disconnect.append((conn_id, code))
01org/fMBT
[ 125, 72, 125, 3, 1340117584 ]
def set_on_disconnect(self, list_of_code): """Replace all "on disconnect" codes with new list of codes. """ self._on_disconnect = list_of_code
01org/fMBT
[ 125, 72, 125, 3, 1340117584 ]
def call_on_disconnect(self, conn_id): for setter_conn_id, code in self._on_disconnect: if not setter_conn_id or setter_conn_id == conn_id: exec_msg = messages.Exec(self.ns, code, None) if opt_debug: daemon_log("on disconnect %s: %s" % (conn_id, exec_msg,)) rv = _local_execute(exec_msg) if opt_debug: daemon_log("on disconnect rv: %s" % (rv,)) if setter_conn_id == conn_id: self._on_disconnect.remove((conn_id, code))
01org/fMBT
[ 125, 72, 125, 3, 1340117584 ]
def read_rv(self, async_rv): """Return and remove asynchronous return value. """ if self.ns != async_rv.ns: raise ValueError("Namespace mismatch") if (async_rv.ns in _g_async_rvs and async_rv.rvid in _g_async_rvs[async_rv.ns]): rv = _g_async_rvs[async_rv.ns][async_rv.rvid] if not isinstance(rv, pythonshare.InProgress): del _g_async_rvs[async_rv.ns][async_rv.rvid] return rv else: raise ValueError('Invalid return value id: "%s"' % (async_rv.rvid,))
01org/fMBT
[ 125, 72, 125, 3, 1340117584 ]
def __init__(self, conn, to_remote, from_remote): self.conn = conn self.to_remote = to_remote self.from_remote = from_remote
01org/fMBT
[ 125, 72, 125, 3, 1340117584 ]
def _init_local_namespace(ns, init_code=None, force=False): if not ns in _g_local_namespaces: if opt_allow_new_namespaces or force: daemon_log('added local namespace "%s"' % (ns,)) _g_local_namespaces[ns] = { "pythonshare_ns": Pythonshare_ns(ns), "Async_rv": pythonshare.messages.Async_rv } _g_local_namespace_locks[ns] = thread.allocate_lock() _g_async_rvs[ns] = {} else: raise ValueError('Unknown namespace "%s"' % (ns,)) if init_code != None: if isinstance(init_code, basestring): try: exec init_code in _g_local_namespaces[ns] except Exception, e: daemon_log('namespace "%s" init error in <string>:\n%s\n\n%s' % ( ns, code2string(init_code), exception2string(sys.exc_info()))) elif isinstance(init_code, dict): # Directly use the dictionary (locals() or globals(), for # instance) as a Pythonshare namespace. clean_ns = _g_local_namespaces[ns] _g_local_namespaces[ns] = init_code _g_local_namespaces[ns].update(clean_ns) # copy pythonshare defaults else: raise TypeError("unsupported init_code type")
01org/fMBT
[ 125, 72, 125, 3, 1340117584 ]
def _drop_remote_namespace(ns):
    daemon_log('drop remote namespace "%s"' % (ns,))
    try:
        rns = _g_remote_namespaces[ns]
        del _g_remote_namespaces[ns]
        rns.__del__()
    except KeyError:
        pass  # already dropped
    # send notification to all connections in _g_namespace_exports[ns]?
01org/fMBT
[ 125, 72, 125, 3, 1340117584 ]
def _register_exported_namespace(ns, conn):
    if not ns in _g_namespace_exports:
        _g_namespace_exports[ns] = []
    _g_namespace_exports[ns].append(conn)
01org/fMBT
[ 125, 72, 125, 3, 1340117584 ]
def _local_async_execute(async_rv, exec_msg):
    exec_rv = _local_execute(exec_msg)
    _g_async_rvs[exec_msg.namespace][async_rv.rvid] = exec_rv
01org/fMBT
[ 125, 72, 125, 3, 1340117584 ]
def _remote_execute_and_forward(ns, exec_msg, to_client, peername=None): """returns (forward_status, info) forward_status values: True: everything successfully forwarded, info contains pair (forwarded byte count, full length). False: not everything forwarded, info contains pair (forwarded byte count, full length). to_client file/socket is not functional. None: no forwarding, info contains Exec_rv that should be sent normally. Raises EOFError if connection to remote namespace is not functional. The peername parameter is used for logging only. """ client_supports_rv_info = exec_msg.recv_cap_data_info() exec_msg.set_recv_cap_data_info(True) rns = _g_remote_namespaces[ns] from_remote = rns.from_remote # Must keep simultaneously two locks: # - send lock on to_client # - recv lock on from_remote pythonshare._acquire_recv_lock(from_remote) try: pythonshare._send(exec_msg, rns.to_remote) response = pythonshare._recv(from_remote, acquire_recv_lock=False) if not isinstance(response, messages.Data_info): # Got direct response without forward mode return (None, response) pythonshare._acquire_send_lock(to_client) if client_supports_rv_info: # send data_info to client pythonshare._send(response, to_client, acquire_send_lock=False) try: if opt_debug and peername: daemon_log("%s:%s <= Exec_rv([forwarding %s B])" % (peername + (response.data_length,))) forwarded_bytes = pythonshare._forward( from_remote, to_client, response.data_length, acquire_recv_lock=False, acquire_send_lock=False) if forwarded_bytes == response.data_length: return (True, (forwarded_bytes, response.data_length)) else: return (False, (forwarded_bytes, response.data_length)) finally: pythonshare._release_send_lock(to_client) finally: exec_msg.set_recv_cap_data_info(client_supports_rv_info) pythonshare._release_recv_lock(from_remote)
01org/fMBT
[ 125, 72, 125, 3, 1340117584 ]
def _serve_connection(conn, conn_opts): global _g_async_rv_counter global _g_server_shutdown if isinstance(conn, client.Connection): to_client = conn._to_server from_client = conn._from_server else: # conn is a connected socket to_client = conn.makefile("w") from_client = conn.makefile("r") try: peername = conn.getpeername() except socket.error: peername = ("unknown", "?") if opt_debug: daemon_log("connected %s:%s" % peername) conn_id = "%s-%s" % (timestamp(), id(conn)) auth_ok = False passwords = [k for k in conn_opts.keys() if k.startswith("password.")] kill_server_on_close = conn_opts.get("kill-server-on-close", False) if passwords: # password authentication is required for this connection try: received_password = pythonshare._recv(from_client) except Exception, e: daemon_log('error receiving password: %r' % (e,)) received_password = None for password_type in passwords: algorithm = password_type.split(".")[1] if type(received_password) == str: if (algorithm == "plaintext" and received_password == conn_opts[password_type]): auth_ok = True elif (hasattr(hashlib, algorithm) and getattr(hashlib, algorithm)(received_password).hexdigest() == conn_opts[password_type]): auth_ok = True try: if auth_ok: pythonshare._send(messages.Auth_rv(True), to_client) if opt_debug: daemon_log("%s:%s authentication ok" % peername) elif not received_password is None: pythonshare._send(messages.Auth_rv(False), to_client) if opt_debug: daemon_log("%s:%s authentication failed" % peername) except socket.error: daemon_log("authentication failed due to socket error") auth_ok = False else: auth_ok = True # no password required whitelist_local = conn_opts.get("whitelist_local", None) while auth_ok: try: obj = pythonshare._recv(from_client) if opt_debug: daemon_log("%s:%s => %s" % (peername + (obj,))) except (EOFError, pythonshare.socket.error): break if isinstance(obj, messages.Register_ns): try: _init_remote_namespace(obj.ns, conn, to_client, from_client) pythonshare._send(messages.Ns_rv(True), to_client) # from this point on, this connection is reserved for # sending remote namespace traffic. The connection will be # used by other threads, this thread stops here. return except Exception, e: pythonshare._send(messages.Ns_rv(False, exception2string(sys.exc_info())), to_client) elif isinstance(obj, messages.Drop_ns): try: if obj.ns in _g_local_namespaces: _drop_local_namespace(obj.ns) elif obj.ns in _g_remote_namespaces: _drop_remote_namespace(obj.ns) else: raise ValueError('Unknown namespace "%s"' % (obj.ns,)) pythonshare._send(messages.Ns_rv(True), to_client) except Exception, e: if opt_debug: daemon_log("namespace drop error: %s" % (e,)) pythonshare._send(messages.Ns_rv(False, exception2string(sys.exc_info())), to_client) elif isinstance(obj, messages.Request_ns): ns = obj.ns if (ns in _g_remote_namespaces or ns in _g_local_namespaces): _register_exported_namespace(ns, conn) pythonshare._send(messages.Ns_rv(True), to_client) # from this point on, this connection is reserved for # receiving executions on requested namespace. This # thread starts serving the connection. 
elif isinstance(obj, messages.Exec): ns = obj.namespace if ns in _g_remote_namespaces: # execute in remote namespace try: _fwd_status, _fwd_info = _remote_execute_and_forward( ns, obj, to_client, peername) if _fwd_status == True: # successfully forwarded if opt_debug: daemon_log("%s:%s forwarded %s B" % (peername + (_fwd_info[0],))) exec_rv = None # return value fully forwarded elif _fwd_status == False: # connection to client is broken if opt_debug: daemon_log("%s:%s error after forwarding %s/%s B" % (peername + _fwd_info)) break elif _fwd_status is None: # nothing forwarded, send return value by normal means exec_rv = _fwd_info except (EOFError, socket.error): daemon_log('connection lost to "%s"' % (ns,)) _drop_remote_namespace(ns) break else: # execute in local namespace if whitelist_local == None or ns in whitelist_local: _init_local_namespace(ns) if obj.async: # asynchronous execution, return handle (Async_rv) _g_async_rv_counter += 1 rvid = timestamp() + str(_g_async_rv_counter) exec_rv = messages.Async_rv(ns, rvid) _g_async_rvs[ns][rvid] = pythonshare.InProgress() thread.start_new_thread(_local_async_execute, (exec_rv, obj)) else: # synchronous execution, return true return value exec_rv = _local_execute(obj, conn_id) if not exec_rv is None: if opt_debug: daemon_log("%s:%s <= %s" % (peername + (exec_rv,))) try: try: if obj.recv_cap_data_info(): info = pythonshare._send_opt(exec_rv, to_client, obj.recv_caps) if info: sent_info = " %s B, format:%s" % ( info.data_length, info.data_format) else: sent_info = "" else: pythonshare._send(exec_rv, to_client) sent_info = "" if opt_debug: daemon_log("%s:%s sent%s" % (peername + (sent_info,))) except (EOFError, socket.error): break except (TypeError, ValueError, cPickle.PicklingError): # pickling rv fails exec_rv.expr_rv = messages.Unpicklable(exec_rv.expr_rv) try: pythonshare._send(exec_rv, to_client) except (EOFError, socket.error): break elif isinstance(obj, messages.Server_ctl): if obj.command == "die": ns = obj.args[0] if ns in _g_remote_namespaces: try: rv = _remote_execute(ns, obj) if opt_debug: daemon_log("%s:%s <= %s" % (peername + (rv,))) pythonshare._send(rv, to_client) except (EOFError, socket.error): # connection lost daemon_log('connection lost to "%s"' % (ns,)) _drop_remote_namespace(ns) break else: _g_server_shutdown = True server_ctl_rv = messages.Server_ctl_rv(0, "shutting down") pythonshare._send(server_ctl_rv, to_client) if _g_wake_server_function: _g_wake_server_function() break elif obj.command == "unlock": try: ns = obj.args[0] if ns in _g_remote_namespaces: try: rv = _remote_execute(ns, obj) except (EOFError, socket.error): # connection lost daemon_log('connection lost to "%s"' % (ns,)) _drop_remote_namespace(ns) break elif ns in _g_local_namespace_locks: try: _g_local_namespace_locks[ns].release() server_ctl_rv = messages.Server_ctl_rv( 0, "%s unlocked" % (repr(ns),)) except thread.error, e: server_ctl_rv = messages.Server_ctl_rv( 1, "%s already unlocked" % (repr(ns),)) elif ns in _g_local_namespaces: server_ctl_rv = messages.Server_ctl_rv( 2, "namespace %s is not locked" % (repr(ns),)) else: server_ctl_rv = messages.Server_ctl_rv( -1, "unknown namespace %s" % (repr(ns),)) if opt_debug: daemon_log("%s:%s <= %s" % (peername + (server_ctl_rv,))) pythonshare._send(server_ctl_rv, to_client) except Exception, e: if opt_debug: daemon_log("Exception in handling %s: %s" % (obj, e)) else: daemon_log("unknown message type: %s in %s" % (type(obj), obj)) pythonshare._send(messages.Auth_rv(False), to_client) auth_ok = False if 
opt_debug: daemon_log("disconnected %s:%s" % peername) _connection_lost(conn_id, to_client, from_client, conn) if kill_server_on_close: _g_server_shutdown = True if _g_wake_server_function: _g_wake_server_function()
01org/fMBT
[ 125, 72, 125, 3, 1340117584 ]
def wake_server_function(): _g_waker_lock.release() # wake up server
01org/fMBT
[ 125, 72, 125, 3, 1340117584 ]
def assertExecutionSucceeded(self, exec_result):
    returncode, stdout, stderr = exec_result
    self.assertEquals(0, returncode)
xapi-project/sm
[ 19, 82, 19, 27, 1371215820 ]
def test_lvcreate_is_mocked(self):
    executable_injector = mock.Mock()
    lvsubsystem = lvmlib.LVSubsystem(None, executable_injector)
    self.assertTrue(
        mock.call('/usr/sbin/lvcreate', lvsubsystem.fake_lvcreate)
        in executable_injector.mock_calls
    )
xapi-project/sm
[ 19, 82, 19, 27, 1371215820 ]
def test_dmsetup_is_mocked(self): executable_injector = mock.Mock() lvsubsystem = lvmlib.LVSubsystem(None, executable_injector) self.assertTrue( mock.call('/sbin/dmsetup', lvsubsystem.fake_dmsetup) in executable_injector.mock_calls )
xapi-project/sm
[ 19, 82, 19, 27, 1371215820 ]
def test_add_multiple_volume_groups(self): lvsubsystem = lvmlib.LVSubsystem(None, mock.Mock()) lvsubsystem.add_volume_group('vg1') lvsubsystem.add_volume_group('vg2') lvsubsystem.add_volume_group('vg3') vg1 = lvsubsystem.get_volume_group('vg1') vg2 = lvsubsystem.get_volume_group('vg2') vg3 = lvsubsystem.get_volume_group('vg3') self.assertEquals('vg1', vg1.name) self.assertEquals('vg2', vg2.name) self.assertEquals('vg3', vg3.name)
xapi-project/sm
[ 19, 82, 19, 27, 1371215820 ]
def test_fake_lvcreate_with_tags(self): lvsubsystem = lvmlib.LVSubsystem(mock.Mock(), mock.Mock()) lvsubsystem.add_volume_group('vg') exec_result = lvsubsystem.fake_lvcreate( "someprog -n name --addtag tagg -L 100 vg".split(), '') lv, = lvsubsystem.get_logical_volumes_with_name('name') self.assertEquals('tagg', lv.tag)
xapi-project/sm
[ 19, 82, 19, 27, 1371215820 ]
def test_fake_lvcreate_non_zeroed(self): lvsubsystem = lvmlib.LVSubsystem(mock.Mock(), mock.Mock()) lvsubsystem.add_volume_group('vg') exec_result = lvsubsystem.fake_lvcreate( "someprog -n name --zero n -L 100 vg".split(), '') lv, = lvsubsystem.get_logical_volumes_with_name('name') self.assertFalse(lv.zeroed) self.assertExecutionSucceeded(exec_result)
xapi-project/sm
[ 19, 82, 19, 27, 1371215820 ]
def test_fake_lvcreate_called_with_wrong_params(self): lvsubsystem = lvmlib.LVSubsystem(mock.Mock(), mock.Mock()) lvsubsystem.add_volume_group('vg') exec_result = lvsubsystem.fake_lvcreate( "someprog --something-stupid -n name n -L 100 vg".split(), '') self.assertExecutionFailed(exec_result)
xapi-project/sm
[ 19, 82, 19, 27, 1371215820 ]
def test_fake_lvremove(self): lvsubsystem = lvmlib.LVSubsystem(mock.Mock(), mock.Mock()) lvsubsystem.add_volume_group('vg') lvsubsystem.get_volume_group('vg').add_volume('lv', 100) exec_result = lvsubsystem.fake_lvremove( "someprog vg/lv".split(), '') self.assertExecutionSucceeded(exec_result)
xapi-project/sm
[ 19, 82, 19, 27, 1371215820 ]
def test_fake_lvremove_with_bad_params(self): lvsubsystem = lvmlib.LVSubsystem(mock.Mock(), mock.Mock()) lvsubsystem.add_volume_group('vg') lvsubsystem.get_volume_group('vg').add_volume('lv', 100) exec_result = lvsubsystem.fake_lvremove( "someprog -f vg/lv --stupid-parameter".split(), '') self.assertExecutionFailed(exec_result)
xapi-project/sm
[ 19, 82, 19, 27, 1371215820 ]
def setUpModule():
    '''
    Set up the module for running tests.
    '''
    # Set the nonce store to the Django store after saving the current settings
    # so they can be restored later.
    global __old_nonce_settings
    __old_nonce_settings = (settings.NONCE_STORE, settings.NONCE_STORE_ARGS)

    settings.NONCE_STORE = 'wsse.server.django.wsse.store.DjangoNonceStore'
    settings.NONCE_STORE_ARGS = []
PrincetonUniversity/pywsse
[ 7, 3, 7, 1, 1472563613 ]
def setUp(self): ''' Set up the test cases. ''' self.user = User.objects.create(username = 'username') self.user_secret = UserSecret.objects.create(user = self.user) self.auth = WSSEAuth('username', self.user_secret.secret) self.base_url = '{}{}'.format(self.live_server_url, self.endpoint)
PrincetonUniversity/pywsse
[ 7, 3, 7, 1, 1472563613 ]
def test_auth_reuse(self): ''' Reuse the same authentication handler. Both requests should succeed. ''' response_a = requests.get(self.base_url, auth = self.auth) response_b = requests.get(self.base_url, auth = self.auth) self.assertEqual(response_a.status_code, status.HTTP_200_OK) self.assertEqual(response_b.status_code, status.HTTP_200_OK)
PrincetonUniversity/pywsse
[ 7, 3, 7, 1, 1472563613 ]
def advertise(cls, subparsers):
    help_text = "print the program version"
    description_text = "Print the program version."
    subparsers.add_parser(cls._get_short_command_name(),
                          help=help_text,
                          description=description_text)
lueschem/edi
[ 33, 13, 33, 16, 1480665730 ]
def f_pulls(coroutine):
    for msg in coroutine:
        print(msg)
NLeSC/noodles
[ 21, 7, 21, 18, 1446464542 ]
def f_receives():
    while True:
        msg = yield
        print(msg)
NLeSC/noodles
[ 21, 7, 21, 18, 1446464542 ]
def coroutine(f):
    @wraps(f)
    def g(*args, **kwargs):
        sink = f(*args, **kwargs)
        sink.send(None)
        return sink
    return g
NLeSC/noodles
[ 21, 7, 21, 18, 1446464542 ]
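A minimal usage sketch for the coroutine decorator above, pairing it with a sink along the lines of f_receives earlier in the file (the printer name below is illustrative, not from the source):

from functools import wraps

def coroutine(f):
    @wraps(f)
    def g(*args, **kwargs):
        sink = f(*args, **kwargs)
        sink.send(None)  # prime the generator so it can accept send()
        return sink
    return g

@coroutine
def printer():
    while True:
        msg = yield
        print(msg)

p = printer()      # already primed by the decorator
p.send("hello")    # prints "hello" without an explicit next() first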
def __init__(self): self._q = queue.Queue()
NLeSC/noodles
[ 21, 7, 21, 18, 1446464542 ]
def sink(self):
    while True:
        msg = yield
        self._q.put(msg)
NLeSC/noodles
[ 21, 7, 21, 18, 1446464542 ]
def __init__(self):
    super().__init__()
    self.output = Endpoint(unsigned(32))
    self.value = self.output.payload
    self.new_en = Signal()
    self.new_value = Signal(32)
google/CFU-Playground
[ 357, 91, 357, 130, 1615325898 ]
def __init__(self):
    super().__init__()
    self.output_streams = {
        i: Endpoint(unsigned(32)) for i in self.REGISTER_IDS}
    self.values = {i: Signal(32) for i in self.REGISTER_IDS}
    self.write_strobes = {i: Signal(1) for i in self.REGISTER_IDS}
google/CFU-Playground
[ 357, 91, 357, 130, 1615325898 ]
def testAddExistingComponent(self): c = Label('abc') tabSheet = TabSheet() tabSheet.addComponent(c) tabSheet.addComponent(c) itr = tabSheet.getComponentIterator() self.assertEquals(c, itr.next()) self.assertRaises(StopIteration, itr.next) self.assertNotEquals(tabSheet.getTab(c), None)
rwl/muntjac
[ 43, 14, 43, 5, 1316308871 ]
def testAddTabWithComponentOnly(self): tabSheet = TabSheet() tab1 = tabSheet.addTab(Label('aaa')) tab2 = tabSheet.addTab(Label('bbb')) tab3 = tabSheet.addTab(Label('ccc')) # Check right order of tabs self.assertEquals(0, tabSheet.getTabPosition(tab1)) self.assertEquals(1, tabSheet.getTabPosition(tab2)) self.assertEquals(2, tabSheet.getTabPosition(tab3)) # Calling addTab with existing component does not move tab tabSheet.addTab(tab1.getComponent()) # Check right order of tabs self.assertEquals(0, tabSheet.getTabPosition(tab1)) self.assertEquals(1, tabSheet.getTabPosition(tab2)) self.assertEquals(2, tabSheet.getTabPosition(tab3))
rwl/muntjac
[ 43, 14, 43, 5, 1316308871 ]
def testAddTabWithAllParameters(self): tabSheet = TabSheet() tab1 = tabSheet.addTab(Label('aaa')) tab2 = tabSheet.addTab(Label('bbb')) tab3 = tabSheet.addTab(Label('ccc')) tab4 = tabSheet.addTab(Label('ddd'), 'ddd', None, 1) tab5 = tabSheet.addTab(Label('eee'), 'eee', None, 3) self.assertEquals(0, tabSheet.getTabPosition(tab1)) self.assertEquals(1, tabSheet.getTabPosition(tab4)) self.assertEquals(2, tabSheet.getTabPosition(tab2)) self.assertEquals(3, tabSheet.getTabPosition(tab5)) self.assertEquals(4, tabSheet.getTabPosition(tab3)) # Calling addTab with existing component does not move tab tabSheet.addTab(tab1.getComponent(), 'xxx', None, 3) self.assertEquals(0, tabSheet.getTabPosition(tab1)) self.assertEquals(1, tabSheet.getTabPosition(tab4)) self.assertEquals(2, tabSheet.getTabPosition(tab2)) self.assertEquals(3, tabSheet.getTabPosition(tab5)) self.assertEquals(4, tabSheet.getTabPosition(tab3))
rwl/muntjac
[ 43, 14, 43, 5, 1316308871 ]
def test_disconnect(self):
    response_data = api.on_disconnect()
    self.assertEqual(ErrorCodes.OK, response_data[0])
thenetcircle/dino
[ 139, 6, 139, 11, 1475559609 ]
def clean_first_name(self):
    r = re.compile(u'^[А-ЯЁ][а-яё]*$', re.UNICODE)
    res = r.match(self.cleaned_data['first_name'])
    if res is None:
        raise ValidationError(
            _('Неверный формат имени: первая буква должна быть заглавной, допустимы только русские символы.'))
    return self.cleaned_data['first_name']
IlyaGusev/PoetryCorpus
[ 40, 6, 40, 11, 1477270169 ]
def clean_password(self):
    l = len(self.cleaned_data['password'])
    if l <= 5 or l >= 30:
        raise ValidationError(
            _('Неверная длина пароля.'))
    return self.cleaned_data['password']
IlyaGusev/PoetryCorpus
[ 40, 6, 40, 11, 1477270169 ]
def with_lock(lock):
    """Make sure the lock is held while in this function."""
    def decorator(func):
        @functools.wraps(func)
        def _with_lock(*args, **kwargs):
            with lock:
                return func(*args, **kwargs)
        return _with_lock
    return decorator
deepmind/pysc2
[ 7691, 1151, 7691, 50, 1501006617 ]
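A short usage sketch for the with_lock decorator above (shared_lock, counters, and bump_counter are hypothetical names introduced for illustration, not part of the source):

import functools
import threading

def with_lock(lock):
    """Make sure the lock is held while in this function."""
    def decorator(func):
        @functools.wraps(func)
        def _with_lock(*args, **kwargs):
            with lock:
                return func(*args, **kwargs)
        return _with_lock
    return decorator

shared_lock = threading.Lock()
counters = {}

@with_lock(shared_lock)
def bump_counter(key):
    # the lock is held for the whole body, so the read-modify-write is atomic
    counters[key] = counters.get(key, 0) + 1

bump_counter("games")  # counters == {"games": 1}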
def __new__(cls, ability, static_data): specific_data = static_data[ability.ability_id] if specific_data.remaps_to_ability_id: general_data = static_data[specific_data.remaps_to_ability_id] else: general_data = specific_data return super(_Ability, cls).__new__( cls, ability_id=general_data.ability_id, name=(general_data.friendly_name or general_data.button_name or general_data.link_name), footprint_radius=general_data.footprint_radius, requires_point=ability.requires_point, hotkey=specific_data.hotkey)
deepmind/pysc2
[ 7691, 1151, 7691, 50, 1501006617 ]
def __init__(self, surf, surf_type, surf_rect, world_to_surf, world_to_obs, draw): """A surface to display on screen. Args: surf: The actual pygame.Surface (or subsurface). surf_type: A SurfType, used to tell how to treat clicks in that area. surf_rect: Rect of the surface relative to the window. world_to_surf: Convert a world point to a pixel on the surface. world_to_obs: Convert a world point to a pixel in the observation. draw: A function that draws onto the surface. """ self.surf = surf self.surf_type = surf_type self.surf_rect = surf_rect self.world_to_surf = world_to_surf self.world_to_obs = world_to_obs self.draw = draw
deepmind/pysc2
[ 7691, 1151, 7691, 50, 1501006617 ]
def draw_arc(self, color, world_loc, world_radius, start_angle, stop_angle, thickness=1): """Draw an arc using world coordinates, radius, start and stop angles.""" center = self.world_to_surf.fwd_pt(world_loc).round() radius = max(1, int(self.world_to_surf.fwd_dist(world_radius))) rect = pygame.Rect(center - radius, (radius * 2, radius * 2)) pygame.draw.arc(self.surf, color, rect, start_angle, stop_angle, thickness if thickness < radius else 0)
deepmind/pysc2
[ 7691, 1151, 7691, 50, 1501006617 ]
def draw_rect(self, color, world_rect, thickness=0):
    """Draw a rectangle using world coordinates."""
    tl = self.world_to_surf.fwd_pt(world_rect.tl).round()
    br = self.world_to_surf.fwd_pt(world_rect.br).round()
    rect = pygame.Rect(tl, br - tl)
    pygame.draw.rect(self.surf, color, rect, thickness)
deepmind/pysc2
[ 7691, 1151, 7691, 50, 1501006617 ]
def write_screen(self, font, color, screen_pos, text, align="left", valign="top"): """Write to the screen in font.size relative coordinates.""" pos = point.Point(*screen_pos) * point.Point(0.75, 1) * font.get_linesize() text_surf = font.render(str(text), True, color) rect = text_surf.get_rect() if pos.x >= 0: setattr(rect, align, pos.x) else: setattr(rect, align, self.surf.get_width() + pos.x) if pos.y >= 0: setattr(rect, valign, pos.y) else: setattr(rect, valign, self.surf.get_height() + pos.y) self.surf.blit(text_surf, rect)
deepmind/pysc2
[ 7691, 1151, 7691, 50, 1501006617 ]
def surf_pos(self): return self.surf.world_to_surf.fwd_pt(self.world_pos)
deepmind/pysc2
[ 7691, 1151, 7691, 50, 1501006617 ]
def obs_pos(self): return self.surf.world_to_obs.fwd_pt(self.world_pos)
deepmind/pysc2
[ 7691, 1151, 7691, 50, 1501006617 ]
def _get_desktop_size():
    """Get the desktop size."""
    if platform.system() == "Linux":
        try:
            xrandr_query = subprocess.check_output(["xrandr", "--query"])
            sizes = re.findall(r"\bconnected primary (\d+)x(\d+)", str(xrandr_query))
            if sizes[0]:
                return point.Point(int(sizes[0][0]), int(sizes[0][1]))
        except:  # pylint: disable=bare-except
            logging.error("Failed to get the resolution from xrandr.")

    # Most general, but doesn't understand multiple monitors.
    display_info = pygame.display.Info()
    return point.Point(display_info.current_w, display_info.current_h)
deepmind/pysc2
[ 7691, 1151, 7691, 50, 1501006617 ]
def __init__(self, fps=22.4, step_mul=1, render_sync=False, render_feature_grid=True, video=None): """Create a renderer for use by humans. Make sure to call `init` with the game info, or just use `run`. Args: fps: How fast should the game be run. step_mul: How many game steps to take per observation. render_sync: Whether to wait for the obs to render before continuing. render_feature_grid: When RGB and feature layers are available, whether to render the grid of feature layers. video: A filename to write the video to. Implicitly enables render_sync. """ self._fps = fps self._step_mul = step_mul self._render_sync = render_sync or bool(video) self._raw_actions = False self._render_player_relative = False self._render_rgb = None self._render_feature_grid = render_feature_grid self._window = None self._window_scale = 0.75 self._obs_queue = queue.Queue() self._render_thread = threading.Thread(target=self.render_thread, name="Renderer") self._render_thread.start() self._game_times = collections.deque(maxlen=100) # Avg FPS over 100 frames. # pytype: disable=wrong-keyword-args self._render_times = collections.deque(maxlen=100) # pytype: disable=wrong-keyword-args self._last_time = time.time() self._last_game_loop = 0 self._name_lengths = {} self._video_writer = video_writer.VideoWriter(video, fps) if video else None
deepmind/pysc2
[ 7691, 1151, 7691, 50, 1501006617 ]
def init(self, game_info, static_data): """Take the game info and the static data needed to set up the game. This must be called before render or get_actions for each game or restart. Args: game_info: A `sc_pb.ResponseGameInfo` object for this game. static_data: A `StaticData` object for this game. Raises: ValueError: if there is nothing to render. """ self._game_info = game_info self._static_data = static_data if not game_info.HasField("start_raw"): raise ValueError("Raw observations are required for the renderer.") self._map_size = point.Point.build(game_info.start_raw.map_size) self._playable = point.Rect( point.Point.build(game_info.start_raw.playable_area.p0), point.Point.build(game_info.start_raw.playable_area.p1)) if game_info.options.HasField("feature_layer"): fl_opts = game_info.options.feature_layer self._feature_screen_px = point.Point.build(fl_opts.resolution) self._feature_minimap_px = point.Point.build(fl_opts.minimap_resolution) self._feature_camera_width_world_units = fl_opts.width self._render_rgb = False if not fl_opts.crop_to_playable_area: self._playable = point.Rect(self._map_size) else: self._feature_screen_px = self._feature_minimap_px = None if game_info.options.HasField("render"): render_opts = game_info.options.render self._rgb_screen_px = point.Point.build(render_opts.resolution) self._rgb_minimap_px = point.Point.build(render_opts.minimap_resolution) self._render_rgb = True else: self._rgb_screen_px = self._rgb_minimap_px = None if not self._feature_screen_px and not self._rgb_screen_px: raise ValueError("Nothing to render.") try: self.init_window() self._initialized = True except pygame.error as e: self._initialized = False logging.error("-" * 60) logging.error("Failed to initialize pygame: %s", e) logging.error("Continuing without pygame.") logging.error("If you're using ssh and have an X server, try ssh -X.") logging.error("-" * 60) self._obs = sc_pb.ResponseObservation() self._queued_action = None self._queued_hotkey = "" self._select_start = None self._alerts = {} self._past_actions = [] self._help = False self._last_zoom_time = 0
deepmind/pysc2
[ 7691, 1151, 7691, 50, 1501006617 ]
def init_window(self): """Initialize the pygame window and lay out the surfaces.""" if platform.system() == "Windows": # Enable DPI awareness on Windows to give the correct window size. ctypes.windll.user32.SetProcessDPIAware() # pytype: disable=module-attr pygame.init() if self._render_rgb and self._rgb_screen_px: main_screen_px = self._rgb_screen_px else: main_screen_px = self._feature_screen_px window_size_ratio = main_screen_px num_feature_layers = 0 if self._render_feature_grid: # Want a roughly square grid of feature layers, each being roughly square. if self._game_info.options.raw: num_feature_layers += 5 if self._feature_screen_px: num_feature_layers += len(features.SCREEN_FEATURES) num_feature_layers += len(features.MINIMAP_FEATURES) if num_feature_layers > 0: feature_cols = math.ceil(math.sqrt(num_feature_layers)) feature_rows = math.ceil(num_feature_layers / feature_cols) features_layout = point.Point( feature_cols, feature_rows * 1.05) # Make room for titles. # Scale features_layout to main_screen_px height so we know its width. features_aspect_ratio = (features_layout * main_screen_px.y / features_layout.y) window_size_ratio += point.Point(features_aspect_ratio.x, 0) window_size_px = window_size_ratio.scale_max_size( _get_desktop_size() * self._window_scale).ceil() # Create the actual window surface. This should only be blitted to from one # of the sub-surfaces defined below. self._window = pygame.display.set_mode(window_size_px, 0, 32) pygame.display.set_caption("Starcraft Viewer") # The sub-surfaces that the various draw functions will draw to. self._surfaces = [] def add_surface(surf_type, surf_loc, world_to_surf, world_to_obs, draw_fn): """Add a surface. Drawn in order and intersect in reverse order.""" sub_surf = self._window.subsurface( pygame.Rect(surf_loc.tl, surf_loc.size)) self._surfaces.append(_Surface( sub_surf, surf_type, surf_loc, world_to_surf, world_to_obs, draw_fn)) self._scale = window_size_px.y // 32 self._font_small = pygame.font.Font(None, int(self._scale * 0.5)) self._font_large = pygame.font.Font(None, self._scale) def check_eq(a, b): """Used to run unit tests on the transforms.""" assert (a - b).len() < 0.0001, "%s != %s" % (a, b) # World has origin at bl, world_tl has origin at tl. self._world_to_world_tl = transform.Linear( point.Point(1, -1), point.Point(0, self._map_size.y)) check_eq(self._world_to_world_tl.fwd_pt(point.Point(0, 0)), point.Point(0, self._map_size.y)) check_eq(self._world_to_world_tl.fwd_pt(point.Point(5, 10)), point.Point(5, self._map_size.y - 10)) # Move the point to be relative to the camera. This gets updated per frame. self._world_tl_to_world_camera_rel = transform.Linear( offset=-self._map_size / 4) check_eq(self._world_tl_to_world_camera_rel.fwd_pt(self._map_size / 4), point.Point(0, 0)) check_eq( self._world_tl_to_world_camera_rel.fwd_pt( (self._map_size / 4) + point.Point(5, 10)), point.Point(5, 10)) if self._feature_screen_px: # Feature layer locations in continuous space. 
feature_world_per_pixel = (self._feature_screen_px / self._feature_camera_width_world_units) world_camera_rel_to_feature_screen = transform.Linear( feature_world_per_pixel, self._feature_screen_px / 2) check_eq(world_camera_rel_to_feature_screen.fwd_pt(point.Point(0, 0)), self._feature_screen_px / 2) check_eq( world_camera_rel_to_feature_screen.fwd_pt( point.Point(-0.5, -0.5) * self._feature_camera_width_world_units), point.Point(0, 0)) self._world_to_feature_screen = transform.Chain( self._world_to_world_tl, self._world_tl_to_world_camera_rel, world_camera_rel_to_feature_screen) self._world_to_feature_screen_px = transform.Chain( self._world_to_feature_screen, transform.PixelToCoord()) world_tl_to_feature_minimap = transform.Linear( self._feature_minimap_px / self._playable.diagonal.max_dim()) world_tl_to_feature_minimap.offset = world_tl_to_feature_minimap.fwd_pt( -self._world_to_world_tl.fwd_pt(self._playable.bl)) self._world_to_feature_minimap = transform.Chain( self._world_to_world_tl, world_tl_to_feature_minimap) self._world_to_feature_minimap_px = transform.Chain( self._world_to_feature_minimap, transform.PixelToCoord()) # These are confusing since self._playable is in world coords which is # (bl <= tr), but stored in a Rect that is (tl <= br). check_eq(self._world_to_feature_minimap.fwd_pt(self._playable.bl), point.Point(0, 0)) check_eq(self._world_to_feature_minimap.fwd_pt(self._playable.tr), self._playable.diagonal.scale_max_size(self._feature_minimap_px)) if self._rgb_screen_px: # RGB pixel locations in continuous space. # TODO(tewalds): Use a real 3d projection instead of orthogonal. rgb_world_per_pixel = (self._rgb_screen_px / 24) world_camera_rel_to_rgb_screen = transform.Linear( rgb_world_per_pixel, self._rgb_screen_px / 2) check_eq(world_camera_rel_to_rgb_screen.fwd_pt(point.Point(0, 0)), self._rgb_screen_px / 2) check_eq( world_camera_rel_to_rgb_screen.fwd_pt( point.Point(-0.5, -0.5) * 24), point.Point(0, 0)) self._world_to_rgb_screen = transform.Chain( self._world_to_world_tl, self._world_tl_to_world_camera_rel, world_camera_rel_to_rgb_screen) self._world_to_rgb_screen_px = transform.Chain( self._world_to_rgb_screen, transform.PixelToCoord()) world_tl_to_rgb_minimap = transform.Linear( self._rgb_minimap_px / self._map_size.max_dim()) check_eq(world_tl_to_rgb_minimap.fwd_pt(point.Point(0, 0)), point.Point(0, 0)) check_eq(world_tl_to_rgb_minimap.fwd_pt(self._map_size), self._map_size.scale_max_size(self._rgb_minimap_px)) self._world_to_rgb_minimap = transform.Chain( self._world_to_world_tl, world_tl_to_rgb_minimap) self._world_to_rgb_minimap_px = transform.Chain( self._world_to_rgb_minimap, transform.PixelToCoord()) # Renderable space for the screen. 
screen_size_px = main_screen_px.scale_max_size(window_size_px) minimap_size_px = self._playable.diagonal.scale_max_size(screen_size_px / 4) minimap_offset = point.Point(0, (screen_size_px.y - minimap_size_px.y)) if self._render_rgb: rgb_screen_to_main_screen = transform.Linear( screen_size_px / self._rgb_screen_px) add_surface(SurfType.RGB | SurfType.SCREEN, point.Rect(point.origin, screen_size_px), transform.Chain( # surf self._world_to_rgb_screen, rgb_screen_to_main_screen), self._world_to_rgb_screen_px, self.draw_screen) rgb_minimap_to_main_minimap = transform.Linear( minimap_size_px / self._rgb_minimap_px) add_surface(SurfType.RGB | SurfType.MINIMAP, point.Rect(minimap_offset, minimap_offset + minimap_size_px), transform.Chain( # surf self._world_to_rgb_minimap, rgb_minimap_to_main_minimap), self._world_to_rgb_minimap_px, self.draw_mini_map) else: # Feature layer main screen feature_screen_to_main_screen = transform.Linear( screen_size_px / self._feature_screen_px) add_surface(SurfType.FEATURE | SurfType.SCREEN, point.Rect(point.origin, screen_size_px), transform.Chain( # surf self._world_to_feature_screen, feature_screen_to_main_screen), self._world_to_feature_screen_px, self.draw_screen) feature_minimap_to_main_minimap = transform.Linear( minimap_size_px.max_dim() / self._feature_minimap_px.max_dim()) add_surface(SurfType.FEATURE | SurfType.MINIMAP, point.Rect(minimap_offset, minimap_offset + minimap_size_px), transform.Chain( # surf self._world_to_feature_minimap, feature_minimap_to_main_minimap), self._world_to_feature_minimap_px, self.draw_mini_map) if self._render_feature_grid and num_feature_layers > 0: # Add the raw and feature layers features_loc = point.Point(screen_size_px.x, 0) feature_pane = self._window.subsurface( pygame.Rect(features_loc, window_size_px - features_loc)) feature_pane.fill(colors.white / 2) feature_pane_size = point.Point(*feature_pane.get_size()) feature_grid_size = feature_pane_size / point.Point(feature_cols, feature_rows) feature_layer_area = point.Point(1, 1).scale_max_size( feature_grid_size) feature_layer_padding = feature_layer_area // 20 feature_layer_size = feature_layer_area - feature_layer_padding * 2 feature_font_size = int(feature_grid_size.y * 0.09) feature_font = pygame.font.Font(None, feature_font_size) feature_counter = itertools.count() def add_layer(surf_type, world_to_surf, world_to_obs, name, fn): """Add a layer surface.""" i = next(feature_counter) grid_offset = point.Point(i % feature_cols, i // feature_cols) * feature_grid_size text = feature_font.render(name, True, colors.white) rect = text.get_rect() rect.center = grid_offset + point.Point(feature_grid_size.x / 2, feature_font_size) feature_pane.blit(text, rect) surf_loc = (features_loc + grid_offset + feature_layer_padding + point.Point(0, feature_font_size)) add_surface(surf_type, point.Rect(surf_loc, surf_loc + feature_layer_size).round(), world_to_surf, world_to_obs, fn) raw_world_to_obs = transform.Linear() raw_world_to_surf = transform.Linear(feature_layer_size / self._map_size) def add_raw_layer(from_obs, name, color): add_layer(SurfType.FEATURE | SurfType.MINIMAP, raw_world_to_surf, raw_world_to_obs, "raw " + name, lambda surf: self.draw_raw_layer(surf, from_obs, name, color)) if self._game_info.options.raw: add_raw_layer(False, "terrain_height", colors.height_map(256)) add_raw_layer(False, "pathing_grid", colors.winter(2)) add_raw_layer(False, "placement_grid", colors.winter(2)) add_raw_layer(True, "visibility", colors.VISIBILITY_PALETTE) add_raw_layer(True, "creep", 
colors.CREEP_PALETTE) def add_feature_layer(feature, surf_type, world_to_surf, world_to_obs): add_layer(surf_type, world_to_surf, world_to_obs, feature.full_name, lambda surf: self.draw_feature_layer(surf, feature)) if self._feature_minimap_px: # Add the minimap feature layers feature_minimap_to_feature_minimap_surf = transform.Linear( feature_layer_size / self._feature_minimap_px) world_to_feature_minimap_surf = transform.Chain( self._world_to_feature_minimap, feature_minimap_to_feature_minimap_surf) for feature in features.MINIMAP_FEATURES: add_feature_layer(feature, SurfType.FEATURE | SurfType.MINIMAP, world_to_feature_minimap_surf, self._world_to_feature_minimap_px) if self._feature_screen_px: # Add the screen feature layers feature_screen_to_feature_screen_surf = transform.Linear( feature_layer_size / self._feature_screen_px) world_to_feature_screen_surf = transform.Chain( self._world_to_feature_screen, feature_screen_to_feature_screen_surf) for feature in features.SCREEN_FEATURES: add_feature_layer(feature, SurfType.FEATURE | SurfType.SCREEN, world_to_feature_screen_surf, self._world_to_feature_screen_px) # Add the help screen help_size = point.Point( (max(len(s) for s, _ in self.shortcuts) + max(len(s) for _, s in self.shortcuts)) * 0.4 + 4, len(self.shortcuts) + 3) * self._scale help_rect = point.Rect(window_size_px / 2 - help_size / 2, window_size_px / 2 + help_size / 2) add_surface(SurfType.CHROME, help_rect, None, None, self.draw_help) # Arbitrarily set the initial camera to the center of the map. self._update_camera(self._map_size / 2)
deepmind/pysc2
[ 7691, 1151, 7691, 50, 1501006617 ]
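Most of init_window is spent wiring up chained linear transforms: world coordinates (origin at the bottom-left) are flipped to a top-left origin, shifted to be relative to the camera, then scaled into surface pixels, and the check_eq assertions verify each stage. The sketch below shows that pipeline in isolation, using plain 2-tuples instead of pysc2's point.Point and transform classes; the Linear/Chain classes and the concrete numbers here are illustrative assumptions, not the library's API.

class Linear:
    """y = scale * x + offset, applied per component."""
    def __init__(self, scale=(1.0, 1.0), offset=(0.0, 0.0)):
        self.scale, self.offset = scale, offset
    def fwd(self, p):
        return (p[0] * self.scale[0] + self.offset[0],
                p[1] * self.scale[1] + self.offset[1])

class Chain:
    """Apply transforms left to right, like transform.Chain above."""
    def __init__(self, *transforms):
        self.transforms = transforms
    def fwd(self, p):
        for t in self.transforms:
            p = t.fwd(p)
        return p

map_size = (64.0, 64.0)
screen_px = (84.0, 84.0)
camera_width = 24.0  # world units visible across the screen

# World origin is bottom-left; flip y so (0, 0) lands at the top-left corner.
world_to_world_tl = Linear(scale=(1.0, -1.0), offset=(0.0, map_size[1]))
# Make coordinates relative to the camera (fixed at the map centre here).
world_tl_to_camera = Linear(offset=(-map_size[0] / 2, -map_size[1] / 2))
# Scale world units to pixels and recentre on the middle of the screen.
camera_to_screen = Linear(scale=(screen_px[0] / camera_width,) * 2,
                          offset=(screen_px[0] / 2, screen_px[1] / 2))

world_to_screen = Chain(world_to_world_tl, world_tl_to_camera, camera_to_screen)
# The camera centre maps to the centre of the screen, as in the check_eq calls.
assert world_to_screen.fwd((map_size[0] / 2, map_size[1] / 2)) == (42.0, 42.0)

The same pattern repeats for the feature minimap, the RGB screen and the RGB minimap: only the scale and offset of the last link in the chain change, so every surface can translate a world point to its own pixel space with one fwd call.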
def zoom(self, factor): """Zoom the window in/out.""" self._window_scale *= factor if time.time() - self._last_zoom_time < 1: # Avoid a deadlock in pygame if you zoom too quickly. time.sleep(time.time() - self._last_zoom_time) self.init_window() self._last_zoom_time = time.time()
deepmind/pysc2
[ 7691, 1151, 7691, 50, 1501006617 ]
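zoom applies a multiplicative window scale and then rebuilds the window, sleeping briefly when calls arrive less than a second apart so pygame is not re-initialized in a tight loop. A standalone sketch of that throttling pattern follows; it sleeps out the remaining interval rather than the elapsed time, which is a simplification of, not a copy of, pysc2's exact behaviour, and the Zoomer class is purely illustrative.

import time

class Zoomer:
    """Throttle expensive window rebuilds triggered by repeated zooming."""
    def __init__(self, rebuild):
        self._rebuild = rebuild          # e.g. the viewer's init_window
        self._scale = 1.0
        self._last_zoom_time = 0.0

    def zoom(self, factor, min_interval=1.0):
        self._scale *= factor
        elapsed = time.time() - self._last_zoom_time
        if elapsed < min_interval:
            time.sleep(min_interval - elapsed)   # wait out the remainder
        self._rebuild(self._scale)
        self._last_zoom_time = time.time()

# Example: two quick calls; the second one waits before rebuilding.
z = Zoomer(lambda s: print("rebuild at scale", s))
z.zoom(1.1)
z.zoom(1.1)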
def clear_queued_action(self): self._queued_hotkey = "" self._queued_action = None
deepmind/pysc2
[ 7691, 1151, 7691, 50, 1501006617 ]
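clear_queued_action drops both halves of a pending command at once: the partially typed hotkey sequence and the ability waiting for a target. The snippet below is only an illustration of why the two fields are reset together, not pysc2's actual input handling; QueuedCommandState and hotkey_table are assumed names.

class QueuedCommandState:
    def __init__(self):
        self.queued_hotkey = ""      # keys typed so far, e.g. "b" then "bb"
        self.queued_action = None    # ability selected, awaiting a screen target

    def press(self, key, hotkey_table):
        """Accumulate keys until they resolve to an ability in hotkey_table."""
        self.queued_hotkey += key
        action = hotkey_table.get(self.queued_hotkey)
        if action is not None:
            self.queued_action = action   # the next click supplies the target
        return action

    def cancel(self):
        """Equivalent of clear_queued_action: drop any half-entered command."""
        self.queued_hotkey = ""
        self.queued_action = None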