Dataset schema: function (string, 11 to 56k chars), repo_name (string, 5 to 60 chars), features (sequence).
def test_should_encode_single_quote_when_single_quoted(self):
    encoded = cypher_repr(u"'", quote=u"'")
    assert encoded == u"'\\''"
technige/cypy
[ 5, 2, 5, 1, 1445556635 ]
def test_should_encode_single_quote_when_double_quoted(self):
    encoded = cypher_repr(u"'", quote=u"\"")
    assert encoded == u'"\'"'
technige/cypy
[ 5, 2, 5, 1, 1445556635 ]
def test_should_encode_4_byte_extended_character(self):
    encoded = cypher_repr(u"\uABCD")
    assert encoded == u"'\\uabcd'"
technige/cypy
[ 5, 2, 5, 1, 1445556635 ]
def test_should_encode_complex_sequence(self):
    encoded = cypher_repr(u"' '' '''")
    assert encoded == u"\"' '' '''\""
technige/cypy
[ 5, 2, 5, 1, 1445556635 ]
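Read together, the quoting tests above imply (a sketch inferred from the assertions, not from cypy's documentation) that cypher_repr escapes the active quote character and, when no quote is forced, picks the delimiter that needs the least escaping:

    cypher_repr(u"'", quote=u"'")  # -> u"'\\''"         escaped in place
    cypher_repr(u"' '' '''")       # -> u"\"' '' '''\""  double quotes chosen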
def test_should_encode_list(self):
    encoded = cypher_repr([1, 2.0, u"three"])
    assert encoded == u"[1, 2.0, 'three']"
technige/cypy
[ 5, 2, 5, 1, 1445556635 ]
def test_should_encode_map(self):
    encoded = cypher_repr(OrderedDict([("one", 1), ("two", 2.0), ("number three", u"three")]))
    assert encoded == u"{one: 1, two: 2.0, `number three`: 'three'}"
technige/cypy
[ 5, 2, 5, 1, 1445556635 ]
def test_should_encode_empty_node(self):
    a = Node()
    encoded = cypher_repr(a, node_template="{labels} {properties}")
    assert encoded == u"({})"
technige/cypy
[ 5, 2, 5, 1, 1445556635 ]
def test_should_encode_node_with_label(self):
    a = Node("Person")
    encoded = cypher_repr(a, node_template="{labels} {properties}")
    assert encoded == u"(:Person {})"
technige/cypy
[ 5, 2, 5, 1, 1445556635 ]
def test_can_encode_relationship(self):
    a = Node(name="Alice")
    b = Node(name="Bob")
    ab = KNOWS(a, b)
    encoded = cypher_repr(ab, related_node_template="{property.name}")
    self.assertEqual("(Alice)-[:KNOWS {}]->(Bob)", encoded)
technige/cypy
[ 5, 2, 5, 1, 1445556635 ]
def test_can_encode_relationship_with_alternative_names(self):
    a = Node("Person", nom=u"Aimée")
    b = Node("Person", nom=u"Baptiste")
    ab = KNOWS_FR(a, b)
    encoded = cypher_repr(ab, related_node_template=u"{property.nom}")
    self.assertEqual(u"(Aimée)-[:CONNAÎT {}]->(Baptiste)", encoded)
technige/cypy
[ 5, 2, 5, 1, 1445556635 ]
def test_filter(tmp_path, simulator):
    unit_test = tmp_path.joinpath('some_unit_test.sv')
    unit_test.write_text('''
svunit/svunit
[ 132, 50, 132, 79, 1447196118 ]
def test_filter_wildcards(tmp_path, simulator):
    failing_unit_test = tmp_path.joinpath('some_failing_unit_test.sv')
    failing_unit_test.write_text('''
svunit/svunit
[ 132, 50, 132, 79, 1447196118 ]
def test_filter_without_dot(tmp_path, simulator):
    dummy_unit_test = tmp_path.joinpath('dummy_unit_test.sv')
    dummy_unit_test.write_text('''
svunit/svunit
[ 132, 50, 132, 79, 1447196118 ]
def test_filter_with_extra_dot(tmp_path, simulator):
    dummy_unit_test = tmp_path.joinpath('dummy_unit_test.sv')
    dummy_unit_test.write_text('''
svunit/svunit
[ 132, 50, 132, 79, 1447196118 ]
def test_filter_with_partial_wildcard(tmp_path, simulator):
    dummy_unit_test = tmp_path.joinpath('dummy_unit_test.sv')
    dummy_unit_test.write_text('''
svunit/svunit
[ 132, 50, 132, 79, 1447196118 ]
def test_multiple_filter_expressions(tmp_path, simulator):
    unit_test = tmp_path.joinpath('some_unit_test.sv')
    unit_test.write_text('''
svunit/svunit
[ 132, 50, 132, 79, 1447196118 ]
def test_negative_filter(tmp_path, simulator):
    unit_test = tmp_path.joinpath('some_unit_test.sv')
    unit_test.write_text('''
svunit/svunit
[ 132, 50, 132, 79, 1447196118 ]
def test_positive_and_negative_filter(tmp_path, simulator):
    unit_test = tmp_path.joinpath('some_unit_test.sv')
    unit_test.write_text('''
svunit/svunit
[ 132, 50, 132, 79, 1447196118 ]
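The eight truncated tests above all exercise SVUnit's test filtering. As a hedged illustration of the filter syntax they imply (the flag name and separator are assumptions inferred from the test names, not confirmed by this excerpt):

    # runSVUnit --filter 'some_ut.some_test'    select one test of one unit-test module
    # runSVUnit --filter 'some_ut.*'            wildcard over a module's tests
    # runSVUnit --filter 'a_ut.t1:b_ut.t2'      multiple expressions
    # runSVUnit --filter '-some_ut.flaky_test'  negative filter (exclude)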
def logger_name_from_path(path):
    """Validate a logger URI path and get the logger name.

    :type path: str
    :param path: URI path for a logger API request.

    :rtype: str
    :returns: Logger name parsed from ``path``.
    :raises: :class:`ValueError` if the ``path`` is ill-formed or if
        the project from the ``path`` does not agree with the
        ``project`` passed in.
    """
    return _name_from_project_path(path, None, _LOGGER_TEMPLATE)
jonparrott/google-cloud-python
[ 2, 1, 2, 1, 1443151125 ]
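A minimal usage sketch, assuming the standard Cloud Logging path template 'projects/<project>/logs/<logger>' behind _LOGGER_TEMPLATE:

    logger_name_from_path('projects/my-project/logs/syslog')  # -> 'syslog'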
def _extract_payload(cls, resource):
    """Helper for :meth:`from_api_repr`"""
    return None
jonparrott/google-cloud-python
[ 2, 1, 2, 1, 1443151125 ]
def from_api_repr(cls, resource, client, loggers=None):
    """Factory: construct an entry given its API representation

    :type resource: dict
    :param resource: text entry resource representation returned from the API

    :type client: :class:`google.cloud.logging.client.Client`
    :param client: Client which holds credentials and project configuration.

    :type loggers: dict
    :param loggers: (Optional) A mapping of logger fullnames -> loggers.
        If not passed, the entry will have a newly-created logger.

    :rtype: :class:`google.cloud.logging.entries.LogEntry`
    :returns: Log entry parsed from ``resource``.
    """
    if loggers is None:
        loggers = {}
    logger_fullname = resource['logName']
    logger = loggers.get(logger_fullname)
    if logger is None:
        logger_name = logger_name_from_path(logger_fullname)
        logger = loggers[logger_fullname] = client.logger(logger_name)
    payload = cls._extract_payload(resource)
    insert_id = resource.get('insertId')
    timestamp = resource.get('timestamp')
    if timestamp is not None:
        timestamp = _rfc3339_nanos_to_datetime(timestamp)
    labels = resource.get('labels')
    severity = resource.get('severity')
    http_request = resource.get('httpRequest')
    trace = resource.get('trace')
    span_id = resource.get('spanId')
    trace_sampled = resource.get('traceSampled')
    source_location = resource.get('sourceLocation')
    if source_location is not None:
        line = source_location.pop('line', None)
        source_location['line'] = _int_or_none(line)
    operation = resource.get('operation')
    monitored_resource_dict = resource.get('resource')
    monitored_resource = None
    if monitored_resource_dict is not None:
        monitored_resource = Resource._from_dict(monitored_resource_dict)
    inst = cls(
        log_name=logger_fullname,
        insert_id=insert_id,
        timestamp=timestamp,
        labels=labels,
        severity=severity,
        http_request=http_request,
        resource=monitored_resource,
        trace=trace,
        span_id=span_id,
        trace_sampled=trace_sampled,
        source_location=source_location,
        operation=operation,
        logger=logger,
        payload=payload,
    )
    received = resource.get('receiveTimestamp')
    if received is not None:
        inst.received_timestamp = _rfc3339_nanos_to_datetime(received)
    return inst
jonparrott/google-cloud-python
[ 2, 1, 2, 1, 1443151125 ]
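A sketch of the resource shape this factory consumes. The field values are illustrative, and `TextEntry` stands in for whichever concrete subclass supplies `_extract_payload`; `client` would be an authenticated logging Client:

    resource = {
        'logName': 'projects/my-project/logs/syslog',
        'textPayload': 'hello world',
        'timestamp': '2021-01-01T00:00:00.000000000Z',
        'insertId': 'abc123',
    }
    entry = TextEntry.from_api_repr(resource, client)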
def _extract_payload(cls, resource):
    """Helper for :meth:`from_api_repr`"""
    return resource['textPayload']
jonparrott/google-cloud-python
[ 2, 1, 2, 1, 1443151125 ]
def _extract_payload(cls, resource):
    """Helper for :meth:`from_api_repr`"""
    return resource['jsonPayload']
jonparrott/google-cloud-python
[ 2, 1, 2, 1, 1443151125 ]
def _extract_payload(cls, resource):
    """Helper for :meth:`from_api_repr`"""
    return resource['protoPayload']
jonparrott/google-cloud-python
[ 2, 1, 2, 1, 1443151125 ]
def payload_pb(self):
    if isinstance(self.payload, Any):
        return self.payload
jonparrott/google-cloud-python
[ 2, 1, 2, 1, 1443151125 ]
def payload_json(self):
    if not isinstance(self.payload, Any):
        return self.payload
jonparrott/google-cloud-python
[ 2, 1, 2, 1, 1443151125 ]
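The two properties above split the single payload attribute by type; a sketch of the resulting contract, where Any is protobuf's google.protobuf.any_pb2.Any:

    entry.payload_pb    # the Any message, or None when the payload is not protobuf
    entry.payload_json  # the payload (e.g. a dict), or None when it is an Any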
def main():
neurodata/ndstore
[ 39, 12, 39, 99, 1302639682 ]
def _NCNameChar(x):
    return x.isalpha() or x.isdigit() or x == "." or x == "-" or x == "_"
pycontribs/wstools
[ 7, 8, 7, 6, 1339586123 ]
def _toUnicodeHex(x):
    hexval = hex(ord(x[0]))[2:]
    hexlen = len(hexval)
    # Pad hexval with leading zeros to either 4 or 8 digits.
    if hexlen <= 4:
        hexval = hexval.zfill(4)
    elif hexlen <= 8:
        hexval = hexval.zfill(8)
    else:
        raise Exception("Illegal Value returned from hex(ord(x))")
    return "_x" + hexval + "_"
pycontribs/wstools
[ 7, 8, 7, 6, 1339586123 ]
def toXMLname(string):
    """Convert string to a XML name."""
    if string.find(':') != -1:
        (prefix, localname) = string.split(':', 1)
    else:
        prefix = None
        localname = string
    T = text_type(localname)
    N = len(localname)
    X = []
    for i in range(N):
        if i < N - 1 and T[i] == u'_' and T[i + 1] == u'x':
            X.append(u'_x005F_')
        elif i == 0 and N >= 3 and \
                (T[0] == u'x' or T[0] == u'X') and \
                (T[1] == u'm' or T[1] == u'M') and \
                (T[2] == u'l' or T[2] == u'L'):
            X.append(u'_xFFFF_' + T[0])
        elif (not _NCNameChar(T[i])) or (i == 0 and not _NCNameStartChar(T[i])):
            X.append(_toUnicodeHex(T[i]))
        else:
            X.append(T[i])
    if prefix:
        return "%s:%s" % (prefix, u''.join(X))
    return u''.join(X)
pycontribs/wstools
[ 7, 8, 7, 6, 1339586123 ]
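A few illustrative round trips through the helpers above (outputs derived by tracing the code, not taken from the library's test suite):

    toXMLname('order total')  # -> 'order_x0020_total'  (space is not an NCName char)
    toXMLname('xmlData')      # -> '_xFFFF_xmlData'     (leading 'xml' is reserved)
    _toUnicodeHex(' ')        # -> '_x0020_'            (padded to 4 hex digits)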
def fun(matchobj):
    return _fromUnicodeHex(matchobj.group(0))
pycontribs/wstools
[ 7, 8, 7, 6, 1339586123 ]
def get_clusters_info(uid):
    c_db = get_routeCluster_db()
    s_db = get_section_db()
    clusterJson = c_db.find_one({"clusters": {"$exists": True}, "user": uid})
    if clusterJson is None:
        return []
    c_info = []
    clusterSectionLists = list(clusterJson["clusters"].values())
    logging.debug("Number of section lists for user %s is %s" %
                  (uid, len(clusterSectionLists)))
    for sectionList in clusterSectionLists:
        first = True
        logging.debug("Number of sections in sectionList for user %s is %s" %
                      (uid, len(sectionList)))
        if len(sectionList) == 0:
            # There's no point in returning this cluster, let's move on
            continue
        distributionArrays = [[] for _ in range(5)]
        for section in sectionList:
            section_json = s_db.find_one({"_id": section})
            if first:
                representative_trip = section_json
                first = False
            appendIfPresent(distributionArrays[0], section_json, "section_start_datetime")
            appendIfPresent(distributionArrays[1], section_json, "section_end_datetime")
            appendIfPresent(distributionArrays[2], section_json, "section_start_point")
            appendIfPresent(distributionArrays[3], section_json, "section_end_point")
            appendIfPresent(distributionArrays[4], section_json, "confirmed_mode")
        c_info.append((distributionArrays, representative_trip))
    return c_info
e-mission/e-mission-server
[ 20, 103, 20, 11, 1415342342 ]
def __init__(self, value):
    self.value = value
e-mission/e-mission-server
[ 20, 103, 20, 11, 1415342342 ]
def getCanonicalTrips(uid, get_representative=False):  # the number returned isn't used
    """uid is a UUID object, not a string"""
    logging.debug('UUID for canonical %s' % uid)
    info = get_clusters_info(uid)
    cluster_json_list = []
    for (cluster, rt) in info:
        json_dict = dict()
        json_dict["representative_trip"] = rt
        json_dict["start_point_distr"] = cluster[2]
        json_dict["end_point_distr"] = cluster[3]
        json_dict["start_time_distr"] = cluster[0]
        json_dict["end_time_distr"] = cluster[1]
        json_dict["confirmed_mode_list"] = cluster[4]
        cluster_json_list.append(json_dict)
    return iter(cluster_json_list)
e-mission/e-mission-server
[ 20, 103, 20, 11, 1415342342 ]
def getAllTrips(uid):
    query = {'user_id': uid, 'type': 'move'}
    return get_trip_db().find(query)
e-mission/e-mission-server
[ 20, 103, 20, 11, 1415342342 ]
def getNoAlternatives(uid):
    # If pipelineFlags exists then we have started alternatives, and so have
    # already scheduled the query. No need to reschedule unless the query fails.
    # TODO: If the query fails, then remove the pipelineFlags so that we will
    # reschedule.
    query = {'user_id': uid, 'type': 'move', 'pipelineFlags': {'$exists': False}}
    return get_trip_db().find(query)
e-mission/e-mission-server
[ 20, 103, 20, 11, 1415342342 ]
def getTrainingTrips(uid):
    return getTrainingTrips_Date(uid, 30)
e-mission/e-mission-server
[ 20, 103, 20, 11, 1415342342 ]
def getAlternativeTrips(trip_id):
    # TODO: clean up datetime handling and queries here
    query = {'trip_id': trip_id}
    alternatives = get_alternatives_db().find(query)
    count = alternatives.estimated_document_count()
    if count > 0:
        logging.debug("Number of alternatives for trip %s is %d" % (trip_id, count))
        return alternatives
    raise AlternativesNotFound("No Alternatives Found")
e-mission/e-mission-server
[ 20, 103, 20, 11, 1415342342 ]
def getTripsThroughMode(uid):
    raise NotImplementedError()
e-mission/e-mission-server
[ 20, 103, 20, 11, 1415342342 ]
def saltfpringfilter(axc, ayc, arad, rxc, ryc, filterfreq, filterwidth, itmax,
                     conv, fitwidth, image, logfile, useconfig, configfile,
                     verbose):
    """Determines the center coordinates of a ring, bins the ring radially
    and computes its power spectrum, and allows the user to select a
    smoothing filter for the ring.
    """
saltastro/pysalt
[ 15, 18, 15, 33, 1366643211 ]
def update_relations(self, **kwarg):
    pass
psi4/mongo_qcdb
[ 120, 42, 120, 82, 1484857698 ]
def contributed_values(self):
    return self._contributed_values(self.contributed_values_obj)
psi4/mongo_qcdb
[ 120, 42, 120, 82, 1484857698 ]
def _contributed_values(contributed_values_obj):
    if not contributed_values_obj:
        return {}

    if not isinstance(contributed_values_obj, list):
        contributed_values_obj = [contributed_values_obj]

    ret = {}
    try:
        for obj in contributed_values_obj:
            ret[obj.name.lower()] = obj.to_dict(exclude=["collection_id"])
    except Exception:
        # Swallow conversion errors and return whatever was collected.
        pass

    return ret
psi4/mongo_qcdb
[ 120, 42, 120, 82, 1484857698 ]
def contributed_values(self, dict_values):
    return dict_values
psi4/mongo_qcdb
[ 120, 42, 120, 82, 1484857698 ]
def records(self):
    """Calculated property when accessed, not saved in the DB.
    A view of the many-to-many relation."""
    return self._records(self.records_obj)
psi4/mongo_qcdb
[ 120, 42, 120, 82, 1484857698 ]
def _records(records_obj):
    if not records_obj:
        return []

    if not isinstance(records_obj, list):
        records_obj = [records_obj]

    ret = []
    try:
        for rec in records_obj:
            ret.append(rec.to_dict(exclude=["dataset_id"]))
    except Exception:
        # raises exception on first access!!
        pass

    return ret
psi4/mongo_qcdb
[ 120, 42, 120, 82, 1484857698 ]
def records(self, dict_values):
    return dict_values
psi4/mongo_qcdb
[ 120, 42, 120, 82, 1484857698 ]
def contributed_values(self):
    return self._contributed_values(self.contributed_values_obj)
psi4/mongo_qcdb
[ 120, 42, 120, 82, 1484857698 ]
def _contributed_values(contributed_values_obj):
    return DatasetORM._contributed_values(contributed_values_obj)
psi4/mongo_qcdb
[ 120, 42, 120, 82, 1484857698 ]
def contributed_values(self, dict_values):
    return dict_values
psi4/mongo_qcdb
[ 120, 42, 120, 82, 1484857698 ]
def records(self):
    """Calculated property when accessed, not saved in the DB.
    A view of the many-to-many relation."""
    return self._records(self.records_obj)
psi4/mongo_qcdb
[ 120, 42, 120, 82, 1484857698 ]
def _records(records_obj):
    if not records_obj:
        return []

    if not isinstance(records_obj, list):
        records_obj = [records_obj]

    ret = []
    try:
        for rec in records_obj:
            ret.append(rec.to_dict(exclude=["reaction_dataset_id"]))
    except Exception:
        # raises exception on first access!!
        pass

    return ret
psi4/mongo_qcdb
[ 120, 42, 120, 82, 1484857698 ]
def records(self, dict_values):
    return dict_values
psi4/mongo_qcdb
[ 120, 42, 120, 82, 1484857698 ]
def sm_section(name: str) -> str:
    """:return: section title used in .gitmodules configuration file"""
    return f'submodule "{name}"'
gitpython-developers/GitPython
[ 3885, 833, 3885, 140, 1291138443 ]
def mkhead(repo: 'Repo', path: PathLike) -> 'Head':
    """:return: New branch/head instance"""
    return git.Head(repo, git.Head.to_full_path(path))
gitpython-developers/GitPython
[ 3885, 833, 3885, 140, 1291138443 ]
def __init__(self, *args: Any, **kwargs: Any) -> None:
    self._smref: Union['ReferenceType[Submodule]', None] = None
    self._index = None
    self._auto_write = True
    super(SubmoduleConfigParser, self).__init__(*args, **kwargs)
gitpython-developers/GitPython
[ 3885, 833, 3885, 140, 1291138443 ]
def set_submodule(self, submodule: 'Submodule') -> None:
    """Set this instance's submodule. It must be called before
    the first write operation begins."""
    self._smref = weakref.ref(submodule)
gitpython-developers/GitPython
[ 3885, 833, 3885, 140, 1291138443 ]
def write(self) -> None:  # type: ignore[override]
    rval: None = super(SubmoduleConfigParser, self).write()
    self.flush_to_index()
    return rval
gitpython-developers/GitPython
[ 3885, 833, 3885, 140, 1291138443 ]
def show_title(self, obj):
    if not obj.target:
        return '-- %s --' % ugettext('empty position')
    else:
        return u'%s [%s]' % (obj.target.title, ugettext(obj.target_ct.name))
WhiskeyMedia/ella
[ 1, 1, 1, 1, 1345455562 ]
def is_filled(self, obj):
    if obj.target:
        return True
    else:
        return False
WhiskeyMedia/ella
[ 1, 1, 1, 1, 1345455562 ]
def is_active(self, obj):
    if obj.disabled:
        return False
    now = timezone.now()
    active_from = not obj.active_from or obj.active_from <= now
    active_till = not obj.active_till or obj.active_till > now
    return active_from and active_till
WhiskeyMedia/ella
[ 1, 1, 1, 1, 1345455562 ]
def save(names, filename):
    """Saves the named snippets to a file."""
    root = ET.Element('snippets')
    root.text = '\n\n'
    root.tail = '\n'
    d = ET.ElementTree(root)

    comment = ET.Comment(_comment.format(appinfo=appinfo))
    comment.tail = '\n\n'
    root.append(comment)

    for name in names:
        snippet = ET.Element('snippet')
        snippet.set('id', name)
        snippet.text = '\n'
        snippet.tail = '\n\n'

        title = ET.Element('title')
        title.text = snippets.title(name, False)
        title.tail = '\n'

        shortcuts = ET.Element('shortcuts')
        ss = model.shortcuts(name)
        if ss:
            shortcuts.text = '\n'
            for s in ss:
                shortcut = ET.Element('shortcut')
                shortcut.text = s.toString()
                shortcut.tail = '\n'
                shortcuts.append(shortcut)
        shortcuts.tail = '\n'

        body = ET.Element('body')
        body.text = snippets.text(name)
        body.tail = '\n'

        snippet.append(title)
        snippet.append(shortcuts)
        snippet.append(body)
        root.append(snippet)

    d.write(filename, "UTF-8")
wbsoft/frescobaldi
[ 612, 145, 612, 441, 1296225531 ]
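For orientation, the shape of the file save() writes, derived from the element construction above; the id, title, shortcut, and body values are illustrative:

    <snippets>
      <!-- generated header comment -->
      <snippet id="mysnippet">
        <title>My snippet</title>
        <shortcuts>
          <shortcut>Ctrl+M</shortcut>
        </shortcuts>
        <body>...</body>
      </snippet>
    </snippets>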
def changed(item):
    if item in (new, updated):
        for i in range(item.childCount()):
            c = item.child(i)
            c.setCheckState(0, item.checkState(0))
wbsoft/frescobaldi
[ 612, 145, 612, 441, 1296225531 ]
def __init__(self, sun_vectors, sun_up_hours):
    """Radiance-based analemma.

    Args:
        sun_vectors: A list of sun vectors as (x, y, z).
        sun_up_hours: List of hours of the year that corresponds to sun_vectors.
    """
    RadianceSky.__init__(self)
    vectors = sun_vectors or []
    self._sun_vectors = tuple(tuple(v) for v in vectors)
    self._sun_up_hours = sun_up_hours
    assert len(sun_up_hours) == len(vectors), \
        ValueError(
            'Length of vectors [%d] does not match the length of hours [%d]' %
            (len(vectors), len(sun_up_hours))
        )
ladybug-analysis-tools/honeybee
[ 90, 26, 90, 38, 1451000618 ]
def from_json(cls, inp):
    """Create an analemma from a dictionary."""
    return cls(inp['sun_vectors'], inp['sun_up_hours'])
ladybug-analysis-tools/honeybee
[ 90, 26, 90, 38, 1451000618 ]
def from_location(cls, location, hoys=None, north=0, is_leap_year=False):
    """Generate a radiance-based analemma for a location.

    Args:
        location: A ladybug location.
        hoys: A list of hours of the year (default: range(8760)).
        north: North angle from Y direction (default: 0).
        is_leap_year: A boolean to indicate if hours are for a leap year
            (default: False).
    """
    sun_vectors = []
    sun_up_hours = []
    hoys = hoys or range(8760)
    north = north or 0
    sp = Sunpath.from_location(location, north)
    sp.is_leap_year = is_leap_year
    for hour in hoys:
        sun = sp.calculate_sun_from_hoy(hour)
        if sun.altitude < 0:
            continue
        sun_vectors.append(sun.sun_vector)
        sun_up_hours.append(hour)
    return cls(sun_vectors, sun_up_hours)
ladybug-analysis-tools/honeybee
[ 90, 26, 90, 38, 1451000618 ]
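A minimal usage sketch; the Location constructor arguments below are assumptions about the ladybug API rather than something confirmed by this excerpt:

    from ladybug.location import Location

    loc = Location('Sydney', country='AUS', latitude=-33.87,
                   longitude=151.21, time_zone=10)
    analemma = Analemma.from_location(loc, hoys=range(0, 8760, 24))
    analemma.analemma_file  # -> 'analemma.rad'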
def from_location_sun_up_hours(cls, location, sun_up_hours, north=0,
                               is_leap_year=False):
    """Generate a radiance-based analemma for a location.

    Args:
        location: A ladybug location.
        sun_up_hours: A list of hours of the year to be included in analemma.
        north: North angle from Y direction (default: 0).
        is_leap_year: A boolean to indicate if hours are for a leap year
            (default: False).
    """
    sun_vectors = []
    north = north or 0
    sp = Sunpath.from_location(location, north)
    sp.is_leap_year = is_leap_year
    for hour in sun_up_hours:
        sun = sp.calculate_sun_from_hoy(hour)
        sun_vectors.append(sun.sun_vector)
    return cls(sun_vectors, sun_up_hours)
ladybug-analysis-tools/honeybee
[ 90, 26, 90, 38, 1451000618 ]
def from_wea(cls, wea, hoys=None, north=0, is_leap_year=False):
    """Generate a radiance-based analemma from a ladybug wea.

    NOTE: Only the location from wea will be used for creating the analemma.
    For a climate-based sun matrix see the SunMatrix class.

    Args:
        wea: A ladybug Wea.
        hoys: A list of hours of the year (default: range(8760)).
        north: North angle from Y direction (default: 0).
        is_leap_year: A boolean to indicate if hours are for a leap year
            (default: False).
    """
    return cls.from_location(wea.location, hoys, north, is_leap_year)
ladybug-analysis-tools/honeybee
[ 90, 26, 90, 38, 1451000618 ]
def from_wea_sun_up_hours(cls, wea, sun_up_hours, north=0, is_leap_year=False):
    """Generate a radiance-based analemma from a ladybug wea.

    NOTE: Only the location from wea will be used for creating the analemma.
    For a climate-based sun matrix see the SunMatrix class.

    Args:
        wea: A ladybug Wea.
        sun_up_hours: A list of hours of the year to be included in analemma.
        north: North angle from Y direction (default: 0).
        is_leap_year: A boolean to indicate if hours are for a leap year
            (default: False).
    """
    return cls.from_location_sun_up_hours(wea.location, sun_up_hours, north,
                                          is_leap_year)
ladybug-analysis-tools/honeybee
[ 90, 26, 90, 38, 1451000618 ]
def from_epw_file(cls, epw_file, hoys=None, north=0, is_leap_year=False):
    """Create an analemma from an epw file.

    NOTE: Only the location from the epw file will be used for creating the
    analemma. For a climate-based sun matrix see the SunMatrix class.

    Args:
        epw_file: Full path to an epw file.
        hoys: A list of hours of the year (default: range(8760)).
        north: North angle from Y direction (default: 0).
        is_leap_year: A boolean to indicate if hours are for a leap year
            (default: False).
    """
    return cls.from_location(EPW(epw_file).location, hoys, north, is_leap_year)
ladybug-analysis-tools/honeybee
[ 90, 26, 90, 38, 1451000618 ]
def from_epw_file_sun_up_hours(cls, epw_file, sun_up_hours, north=0,
                               is_leap_year=False):
    """Create an analemma from an epw file.

    NOTE: Only the location from the epw file will be used for creating the
    analemma. For a climate-based sun matrix see the SunMatrix class.

    Args:
        epw_file: Full path to an epw file.
        sun_up_hours: A list of hours of the year to be included in analemma.
        north: North angle from Y direction (default: 0).
        is_leap_year: A boolean to indicate if hours are for a leap year
            (default: False).
    """
    return cls.from_location_sun_up_hours(EPW(epw_file).location, sun_up_hours,
                                          north, is_leap_year)
ladybug-analysis-tools/honeybee
[ 90, 26, 90, 38, 1451000618 ]
def isAnalemma(self):
    """Return True."""
    return True
ladybug-analysis-tools/honeybee
[ 90, 26, 90, 38, 1451000618 ]
def is_climate_based(self):
    """Return True if generated based on values from weather file."""
    return False
ladybug-analysis-tools/honeybee
[ 90, 26, 90, 38, 1451000618 ]
def analemma_file(self):
    """Analemma file name.

    Use this file to create the octree.
    """
    return 'analemma.rad'
ladybug-analysis-tools/honeybee
[ 90, 26, 90, 38, 1451000618 ]
def sunlist_file(self):
    """Sun list file name.

    Use this file as the list of modifiers in rcontrib.
    """
    return 'analemma.mod'
ladybug-analysis-tools/honeybee
[ 90, 26, 90, 38, 1451000618 ]
def sun_vectors(self):
    """Return list of sun vectors."""
    return self._sun_vectors
ladybug-analysis-tools/honeybee
[ 90, 26, 90, 38, 1451000618 ]
def sun_up_hours(self):
    """Return list of hours for sun vectors."""
    return self._sun_up_hours
ladybug-analysis-tools/honeybee
[ 90, 26, 90, 38, 1451000618 ]
def duplicate(self):
    """Duplicate this class."""
    return Analemma(self.sun_vectors, self.sun_up_hours)
ladybug-analysis-tools/honeybee
[ 90, 26, 90, 38, 1451000618 ]
def to_json(self):
    """Convert analemma to a dictionary."""
    return {'sun_vectors': self.sun_vectors, 'sun_up_hours': self.sun_up_hours}
ladybug-analysis-tools/honeybee
[ 90, 26, 90, 38, 1451000618 ]
def __repr__(self):
    """Analemma representation."""
    return 'Analemma: #%d' % len(self.sun_vectors)
ladybug-analysis-tools/honeybee
[ 90, 26, 90, 38, 1451000618 ]
def analemma_file(self):
    """Analemma file name.

    Use this file to create the octree.
    """
    return 'analemma_reversed.rad'
ladybug-analysis-tools/honeybee
[ 90, 26, 90, 38, 1451000618 ]
def _get_contracts_list(self, employee):
    """Return list of contracts in chronological order."""
    contracts = []
    for c in employee.contract_ids:
        l = len(contracts)
        if l == 0:
            contracts.append(c)
        else:
            dCStart = datetime.strptime(c.date_start, OE_DATEFORMAT).date()
            i = l - 1
            while i >= 0:
                dContractStart = datetime.strptime(
                    contracts[i].date_start, OE_DATEFORMAT).date()
                if dContractStart < dCStart:
                    contracts = contracts[:i + 1] + [c] + contracts[i + 1:]
                    break
                elif i == 0:
                    contracts = [c] + contracts
                i -= 1
    return contracts
bwrsandman/openerp-hr
[ 1, 3, 1, 4, 1402666223 ]
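A hedged sketch of what the insertion above yields; the dates are illustrative and contract_ids is browsed in storage order, not by date:

    # contracts with date_start: '2012-03-01', '2010-01-01', '2011-06-15'
    contracts = self._get_contracts_list(employee)
    [c.date_start for c in contracts]
    # -> ['2010-01-01', '2011-06-15', '2012-03-01']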
def get_months_service_to_date(self, cr, uid, ids, dToday=None, context=None):
    """Returns a dictionary keyed by employee id. The value is a tuple:
    (number of months of employment as a float, initial employment date)."""
    res = dict.fromkeys(ids, 0)
    if dToday is None:
        dToday = date.today()

    for ee in self.pool.get('hr.employee').browse(cr, uid, ids, context=context):
        delta = relativedelta(dToday, dToday)
        contracts = self._get_contracts_list(ee)
        if len(contracts) == 0:
            res[ee.id] = (0.0, False)
            continue
        dInitial = datetime.strptime(
            contracts[0].date_start, OE_DATEFORMAT).date()
        if ee.initial_employment_date:
            dFirstContract = dInitial
            dInitial = datetime.strptime(
                ee.initial_employment_date, '%Y-%m-%d').date()
            if dFirstContract < dInitial:
                raise osv.except_osv(
                    _('Employment Date mismatch!'),
                    _('The initial employment date cannot be after the first '
                      'contract in the system.\nEmployee: %s') % ee.name)
            delta = relativedelta(dFirstContract, dInitial)
        for c in contracts:
            dStart = datetime.strptime(c.date_start, '%Y-%m-%d').date()
            if dStart >= dToday:
                continue
            # If the contract doesn't have an end date, use today's date.
            # If the contract has finished consider the entire duration of
            # the contract, otherwise consider only the months in the
            # contract until today.
            if c.date_end:
                dEnd = datetime.strptime(c.date_end, '%Y-%m-%d').date()
            else:
                dEnd = dToday
            if dEnd > dToday:
                dEnd = dToday
            delta += relativedelta(dEnd, dStart)

        # Set the number of months the employee has worked
        date_part = float(delta.days) / float(
            self._get_days_in_month(dInitial))
        res[ee.id] = (
            float((delta.years * 12) + delta.months) + date_part, dInitial)
    return res
bwrsandman/openerp-hr
[ 1, 3, 1, 4, 1402666223 ]
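The final conversion above turns an accumulated relativedelta into fractional months; a worked check with illustrative values:

    # delta of 2 years, 3 months, 15 days with a 30-day month at dInitial:
    # (2 * 12) + 3 + 15.0 / 30 = 27.5 months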
def _search_amount(self, cr, uid, obj, name, args, context):
    ids = set()
    for cond in args:
        amount = cond[2]
        if isinstance(cond[2], (list, tuple)):
            if cond[1] in ['in', 'not in']:
                amount = tuple(cond[2])
            else:
                continue
        else:
            if cond[1] in ['=like', 'like', 'not like', 'ilike', 'not ilike',
                           'in', 'not in', 'child_of']:
                continue
        cr.execute("select id from hr_employee having %s %%s" % (cond[1]),
                   (amount,))
        res_ids = set(id[0] for id in cr.fetchall())
        ids = ids and (ids & res_ids) or res_ids
    if ids:
        return [('id', 'in', tuple(ids))]
    return [('id', '=', '0')]
bwrsandman/openerp-hr
[ 1, 3, 1, 4, 1402666223 ]
def edit(self, spec, prefix):
    if '%gcc' in spec:
        if '+openmp' in spec:
            make_include = join_path('arch', 'makefile.include.linux_gnu_omp')
        else:
            make_include = join_path('arch', 'makefile.include.linux_gnu')
    elif '%nvhpc' in spec:
        make_include = join_path('arch', 'makefile.include.linux_pgi')
        filter_file('-pgc++libs', '-c++libs', make_include, string=True)
        filter_file('pgcc', spack_cc, make_include)
        filter_file('pgc++', spack_cxx, make_include, string=True)
        filter_file('pgfortran', spack_fc, make_include)
        filter_file('/opt/pgi/qd-2.3.17/install/include',
                    spec['qd'].prefix.include, make_include)
        filter_file('/opt/pgi/qd-2.3.17/install/lib',
                    spec['qd'].prefix.lib, make_include)
    elif '%aocc' in spec:
        if '+openmp' in spec:
            copy(
                join_path('arch', 'makefile.include.linux_gnu_omp'),
                join_path('arch', 'makefile.include.linux_aocc_omp')
            )
            make_include = join_path('arch', 'makefile.include.linux_aocc_omp')
        else:
            copy(
                join_path('arch', 'makefile.include.linux_gnu'),
                join_path('arch', 'makefile.include.linux_aocc')
            )
            make_include = join_path('arch', 'makefile.include.linux_aocc')
        filter_file(
            'gcc', '{0} {1}'.format(spack_cc, '-Mfree'), make_include,
            string=True
        )
        filter_file('g++', spack_cxx, make_include, string=True)
        filter_file('^CFLAGS_LIB[ ]{0,}=.*$', 'CFLAGS_LIB = -O3', make_include)
        filter_file('^FFLAGS_LIB[ ]{0,}=.*$', 'FFLAGS_LIB = -O2', make_include)
        filter_file('^OFLAG[ ]{0,}=.*$', 'OFLAG = -O3', make_include)
        filter_file('^FC[ ]{0,}=.*$', 'FC = {0}'.format(spec['mpi'].mpifc),
                    make_include, string=True)
        filter_file('^FCL[ ]{0,}=.*$', 'FCL = {0}'.format(spec['mpi'].mpifc),
                    make_include, string=True)
    else:
        if '+openmp' in spec:
            make_include = join_path('arch',
                                     'makefile.include.linux_{0}_omp'.
                                     format(spec.compiler.name))
        else:
            make_include = join_path('arch',
                                     'makefile.include.linux_' +
                                     spec.compiler.name)

    os.rename(make_include, 'makefile.include')

    # This bunch of 'filter_file()' is to make these options settable
    # as environment variables
    filter_file('^CPP_OPTIONS[ ]{0,}=[ ]{0,}', 'CPP_OPTIONS ?= ',
                'makefile.include')
    filter_file('^FFLAGS[ ]{0,}=[ ]{0,}', 'FFLAGS ?= ', 'makefile.include')
    filter_file('^LIBDIR[ ]{0,}=.*$', '', 'makefile.include')
    filter_file('^BLAS[ ]{0,}=.*$', 'BLAS ?=', 'makefile.include')
    filter_file('^LAPACK[ ]{0,}=.*$', 'LAPACK ?=', 'makefile.include')
    filter_file('^FFTW[ ]{0,}?=.*$', 'FFTW ?=', 'makefile.include')
    filter_file('^MPI_INC[ ]{0,}=.*$', 'MPI_INC ?=', 'makefile.include')
    filter_file('-DscaLAPACK.*$\n', '', 'makefile.include')
    filter_file('^SCALAPACK[ ]{0,}=.*$', 'SCALAPACK ?=', 'makefile.include')

    if '+cuda' in spec:
        filter_file('^OBJECTS_GPU[ ]{0,}=.*$', 'OBJECTS_GPU ?=',
                    'makefile.include')
        filter_file('^CPP_GPU[ ]{0,}=.*$', 'CPP_GPU ?=', 'makefile.include')
        filter_file('^CFLAGS[ ]{0,}=.*$', 'CFLAGS ?=', 'makefile.include')

    if '+vaspsol' in spec:
        copy('VASPsol/src/solvation.F', 'src/')
LLNL/spack
[ 3244, 1839, 3244, 2847, 1389172932 ]
def build(self, spec, prefix):
    if '+cuda' in self.spec:
        make('gpu', 'gpu_ncl')
    else:
        make('std', 'gam', 'ncl')
LLNL/spack
[ 3244, 1839, 3244, 2847, 1389172932 ]
def _parse_class_args(self):
    """Parse the contrailplugin.ini file.

    OpenContrail supports extensions such as ipam and policy; these
    extensions can be configured in the plugin configuration file as
    shown below. The plugin then loads the specified extensions.

    contrail_extensions=ipam:<classpath>,policy:<classpath>
    """
    contrail_extensions = cfg.CONF.APISERVER.contrail_extensions
    # If multiple classes are specified for the same extension, the last
    # one wins, per DictOpt behavior.
    for ext_name, ext_class in contrail_extensions.items():
        try:
            if not ext_class:
                LOG.error(_('Malformed contrail extension...'))
                continue
            self.supported_extension_aliases.append(ext_name)
            ext_class = importutils.import_class(ext_class)
            ext_instance = ext_class()
            ext_instance.set_core(self)
            for method in dir(ext_instance):
                for prefix in ['get', 'update', 'delete', 'create']:
                    if method.startswith('%s_' % prefix):
                        setattr(self, method,
                                ext_instance.__getattribute__(method))
        except Exception:
            LOG.exception(_("Contrail Backend Error"))
            # Convert the contrail backend error to a Neutron exception
            raise InvalidContrailExtensionError(
                ext_name=ext_name, ext_class=ext_class)

    # keystone
    self._authn_token = None
    if cfg.CONF.auth_strategy == 'keystone':
        kcfg = cfg.CONF.keystone_authtoken
        body = '{"auth":{"passwordCredentials":{'
        body += ' "username": "%s",' % (kcfg.admin_user)
        body += ' "password": "%s"},' % (kcfg.admin_password)
        body += ' "tenantName":"%s"}}' % (kcfg.admin_tenant_name)
        self._authn_body = body
        self._authn_token = cfg.CONF.keystone_authtoken.admin_token
        self._keystone_url = "%s://%s:%s%s" % (
            cfg.CONF.keystone_authtoken.auth_protocol,
            cfg.CONF.keystone_authtoken.auth_host,
            cfg.CONF.keystone_authtoken.auth_port,
            "/v2.0/tokens")
cloudwatt/contrail-neutron-plugin
[ 1, 2, 1, 1, 1401891835 ]
def _get_base_binding_dict(self):
    binding = {
        portbindings.VIF_TYPE: portbindings.VIF_TYPE_VROUTER,
        portbindings.VIF_DETAILS: {
            # TODO(praneetb): Replace with new VIF security details
            portbindings.CAP_PORT_FILTER:
                'security-group' in self.supported_extension_aliases
        }
    }
    return binding
cloudwatt/contrail-neutron-plugin
[ 1, 2, 1, 1, 1401891835 ]
def _request_api_server(self, url, data=None, headers=None):
    # Attempt to post to Api-Server
    response = requests.post(url, data=data, headers=headers)
    if response.status_code == requests.codes.unauthorized:
        # Get token from keystone and save it for next request
        response = requests.post(self._keystone_url,
                                 data=self._authn_body,
                                 headers={'Content-type': 'application/json'})
        if response.status_code == requests.codes.ok:
            # plan is to re-issue original request with new token
            auth_headers = headers or {}
            authn_content = json.loads(response.text)
            self._authn_token = authn_content['access']['token']['id']
            auth_headers['X-AUTH-TOKEN'] = self._authn_token
            response = self._request_api_server(url, data, auth_headers)
        else:
            raise RuntimeError('Authentication Failure')
    return response
cloudwatt/contrail-neutron-plugin
[ 1, 2, 1, 1, 1401891835 ]
def _relay_request(self, url_path, data=None):
    """Send received request to api server."""
    url = "http://%s:%s%s" % (cfg.CONF.APISERVER.api_server_ip,
                              cfg.CONF.APISERVER.api_server_port,
                              url_path)
    return self._request_api_server_authn(
        url, data=data, headers={'Content-type': 'application/json'})
cloudwatt/contrail-neutron-plugin
[ 1, 2, 1, 1, 1401891835 ]
def _encode_context(self, context, operation, apitype):
    cdict = {'user_id': getattr(context, 'user_id', ''),
             'is_admin': getattr(context, 'is_admin', False),
             'operation': operation,
             'type': apitype,
             'tenant_id': getattr(context, 'tenant_id', None)}
    if context.roles:
        cdict['roles'] = context.roles
    if context.tenant:
        cdict['tenant'] = context.tenant
    return cdict
cloudwatt/contrail-neutron-plugin
[ 1, 2, 1, 1, 1401891835 ]
def _prune(self, resource_dict, fields):
    if fields:
        return dict(((key, item) for key, item in resource_dict.items()
                     if key in fields))
    return resource_dict
cloudwatt/contrail-neutron-plugin
[ 1, 2, 1, 1, 1401891835 ]
def _raise_contrail_error(self, status_code, info, obj_name):
    if status_code == requests.codes.bad_request:
        raise ContrailBadRequestError(
            msg=info['message'], resource=obj_name)
    error_class = CONTRAIL_EXCEPTION_MAP[status_code]
    raise error_class(msg=info['message'])
cloudwatt/contrail-neutron-plugin
[ 1, 2, 1, 1, 1401891835 ]
def _get_resource(self, res_type, context, id, fields):
    """Get a resource from API server.

    This method gets a resource from the contrail api server.
    """
    res_dict = self._encode_resource(resource_id=id, fields=fields)
    status_code, res_info = self._request_backend(context, res_dict,
                                                  res_type, 'READ')
    res_dicts = self._transform_response(status_code, info=res_info,
                                         fields=fields, obj_name=res_type)
    LOG.debug("get_%(res_type)s(): %(res_dicts)s",
              {'res_type': res_type, 'res_dicts': res_dicts})
    return res_dicts
cloudwatt/contrail-neutron-plugin
[ 1, 2, 1, 1, 1401891835 ]
def _delete_resource(self, res_type, context, id):
    """Delete a resource in API server.

    This method deletes a resource in the contrail api server.
    """
    res_dict = self._encode_resource(resource_id=id)
    LOG.debug("delete_%(res_type)s(): %(id)s",
              {'res_type': res_type, 'id': id})
    status_code, res_info = self._request_backend(context, res_dict,
                                                  res_type, 'DELETE')
    if status_code != requests.codes.ok:
        self._raise_contrail_error(status_code, info=res_info,
                                   obj_name=res_type)
cloudwatt/contrail-neutron-plugin
[ 1, 2, 1, 1, 1401891835 ]
def _count_resource(self, res_type, context, filters):
    res_dict = self._encode_resource(filters=filters)
    status_code, res_count = self._request_backend(context, res_dict,
                                                   res_type, 'READCOUNT')
    LOG.debug("get_%(res_type)s_count(): %(res_count)r",
              {'res_type': res_type, 'res_count': res_count})
    return res_count
cloudwatt/contrail-neutron-plugin
[ 1, 2, 1, 1, 1401891835 ]
def create_network(self, context, network):
    """Creates a new Virtual Network."""
    return self._create_resource('network', context, network)
cloudwatt/contrail-neutron-plugin
[ 1, 2, 1, 1, 1401891835 ]
def update_network(self, context, network_id, network):
    """Updates the attributes of a particular Virtual Network."""
    return self._update_resource('network', context, network_id, network)
cloudwatt/contrail-neutron-plugin
[ 1, 2, 1, 1, 1401891835 ]
def get_networks(self, context, filters=None, fields=None):
    """Get the list of Virtual Networks."""
    return self._list_resource('network', context, filters, fields)
cloudwatt/contrail-neutron-plugin
[ 1, 2, 1, 1, 1401891835 ]
def create_subnet(self, context, subnet):
    """Creates a new subnet, and assigns it a symbolic name."""
    if subnet['subnet']['gateway_ip'] is None:
        subnet['subnet']['gateway_ip'] = '0.0.0.0'

    if subnet['subnet']['host_routes'] != attr.ATTR_NOT_SPECIFIED:
        if (len(subnet['subnet']['host_routes']) >
                cfg.CONF.max_subnet_host_routes):
            raise exc.HostRoutesExhausted(
                subnet_id=subnet['subnet'].get('id', _('new subnet')),
                quota=cfg.CONF.max_subnet_host_routes)

    subnet_created = self._create_resource('subnet', context, subnet)
    return self._make_subnet_dict(subnet_created)
cloudwatt/contrail-neutron-plugin
[ 1, 2, 1, 1, 1401891835 ]