function: string (lengths 11 to 56k)
repo_name: string (lengths 5 to 60)
features: sequence
def samefile(self, repo_file, ext_file):
    if self.hard:
        if os.path.islink(ext_file):
            return False
        if not os.path.exists(repo_file):
            return False
        return filecmp.cmp(repo_file, ext_file, shallow=False)
    else:
        # not using os.samefile since it resolves repo_file as well which
        # is not what we want
        return os.path.realpath(ext_file) == os.path.abspath(repo_file)
Cube777/dotgit
[ 158, 13, 158, 2, 1451246258 ]
def __init__(self, key):
    """ Set the key to be used for en-/de-cryption. """
    self.twofish = twofish.Twofish()
    self.twofish.set_key(key)
hupf/passwordchest
[ 4, 1, 4, 7, 1255897186 ]
def decrypt(self, ciphertext):
    """ Decrypt the given string using Twofish ECB. """
    if len(ciphertext) % 16:
        raise RuntimeError("Twofish ciphertext length must be a multiple of 16")
    plaintext = ""
    while len(ciphertext) >= 16:
        plaintext += self.twofish.decrypt(ciphertext[0:16])
        ciphertext = ciphertext[16:]
    return plaintext
hupf/passwordchest
[ 4, 1, 4, 7, 1255897186 ]
def parse_version(version):
    match = VERSION_RE.match(version)
    if not match:
        return None, None, 0
    ver = match.group(3)
    rel = match.group(4)
    if match.group(2):
        epoch = int(match.group(2))
    else:
        epoch = 0
    return ver, rel, epoch
archlinux/archweb
[ 270, 117, 270, 77, 1370983197 ]
def get_split_packages_info():
    '''Return info on split packages that do not have an actual package name
    matching the split pkgbase.'''
    pkgnames = Package.objects.values('pkgname')
    split_pkgs = Package.objects.exclude(pkgname=F('pkgbase')).exclude(
        pkgbase__in=pkgnames).values('pkgbase', 'repo', 'arch').annotate(
        last_update=Max('last_update')).distinct()
    all_arches = Arch.objects.in_bulk({s['arch'] for s in split_pkgs})
    all_repos = Repo.objects.in_bulk({s['repo'] for s in split_pkgs})
    for split in split_pkgs:
        split['arch'] = all_arches[split['arch']]
        split['repo'] = all_repos[split['repo']]
    return split_pkgs
archlinux/archweb
[ 270, 117, 270, 77, 1370983197 ]
def __init__(self, pkgname, repo, pkg_a, pkg_b):
    self.pkgname = pkgname
    self.repo = repo
    self.pkg_a = pkg_a
    self.pkg_b = pkg_b
archlinux/archweb
[ 270, 117, 270, 77, 1370983197 ]
def __key(self): return (self.pkgname, hash(self.repo), hash(self.pkg_a), hash(self.pkg_b))
archlinux/archweb
[ 270, 117, 270, 77, 1370983197 ]
def __hash__(self): return hash(self.__key())
archlinux/archweb
[ 270, 117, 270, 77, 1370983197 ]
def get_wrong_permissions():
    sql = """
archlinux/archweb
[ 270, 117, 270, 77, 1370983197 ]
def attach_maintainers(packages):
    '''Given a queryset or something resembling it of package objects, find
    all the maintainers and attach them to the packages to prevent N+1 query
    cascading.'''
    if isinstance(packages, QuerySet):
        pkgbases = packages.values('pkgbase')
    else:
        packages = list(packages)
        pkgbases = {p.pkgbase for p in packages if p is not None}
    rels = PackageRelation.objects.filter(type=PackageRelation.MAINTAINER,
            pkgbase__in=pkgbases).values_list(
            'pkgbase', 'user_id').order_by().distinct()

    # get all the user objects we will need
    user_ids = {rel[1] for rel in rels}
    users = User.objects.in_bulk(user_ids)

    # now build a pkgbase -> [maintainers...] map
    maintainers = defaultdict(list)
    for rel in rels:
        maintainers[rel[0]].append(users[rel[1]])

    annotated = []
    # and finally, attach the maintainer lists on the original packages
    for package in packages:
        if package is None:
            continue
        package.maintainers = maintainers[package.pkgbase]
        annotated.append(package)

    return annotated
archlinux/archweb
[ 270, 117, 270, 77, 1370983197 ]
def __init__(self, packages):
    if len(packages) == 0:
        raise Exception
    self.packages = packages
    self.user = None
    self.target_repo = None
    self.signoffs = set()
    self.default_spec = True

    first = packages[0]
    self.pkgbase = first.pkgbase
    self.arch = first.arch
    self.repo = first.repo
    self.version = ''
    self.last_update = first.last_update
    self.packager = first.packager
    self.maintainers = first.maintainers
    self.specification = fake_signoff_spec(first.arch)

    version = first.full_version
    if all(version == pkg.full_version for pkg in packages):
        self.version = version
archlinux/archweb
[ 270, 117, 270, 77, 1370983197 ]
def package(self):
    '''Try and return a relevant single package object representing this
    group. Start by seeing if there is only one package, then look for the
    matching package by name, finally falling back to a standin package
    object.'''
    if len(self.packages) == 1:
        return self.packages[0]
    same_pkgs = [p for p in self.packages if p.pkgname == p.pkgbase]
    if same_pkgs:
        return same_pkgs[0]
    return PackageStandin(self.packages[0])
archlinux/archweb
[ 270, 117, 270, 77, 1370983197 ]
def find_specification(self, specifications):
    for spec in specifications:
        if spec.pkgbase != self.pkgbase:
            continue
        if self.version and not spec.full_version == self.version:
            continue
        if spec.arch_id == self.arch.id and spec.repo_id == self.repo.id:
            self.specification = spec
            self.default_spec = False
            return
archlinux/archweb
[ 270, 117, 270, 77, 1370983197 ]
def completed(self): return sum(1 for s in self.signoffs if not s.revoked)
archlinux/archweb
[ 270, 117, 270, 77, 1370983197 ]
def required(self): return self.specification.required
archlinux/archweb
[ 270, 117, 270, 77, 1370983197 ]
def __unicode__(self):
    return f'{self.pkgbase}-{self.version} ({self.arch}): {len(self.signoffs)}'
archlinux/archweb
[ 270, 117, 270, 77, 1370983197 ]
def get_current_signoffs(repos):
    '''Returns a list of signoff objects for the given repos.'''
    to_fetch = signoffs_id_query(Signoff, repos)
    return Signoff.objects.select_related('user').in_bulk(to_fetch).values()
archlinux/archweb
[ 270, 117, 270, 77, 1370983197 ]
def get_target_repo_map(repos):
    sql = """
archlinux/archweb
[ 270, 117, 270, 77, 1370983197 ]
def get_signoff_groups(repos=None, user=None):
    if repos is None:
        repos = Repo.objects.filter(testing=True)
    repo_ids = [r.pk for r in repos]

    test_pkgs = Package.objects.select_related(
        'arch', 'repo', 'packager').filter(repo__in=repo_ids)
    packages = test_pkgs.order_by('pkgname')
    packages = attach_maintainers(packages)

    # Filter by user if asked to do so
    if user is not None:
        packages = [p for p in packages
                    if user == p.packager or user in p.maintainers]

    # Collect all pkgbase values in testing repos
    pkgtorepo = get_target_repo_map(repos)

    # Collect all possible signoffs and specifications for these packages
    signoffs = get_current_signoffs(repos)
    specs = get_current_specifications(repos)

    same_pkgbase_key = lambda x: (x.repo.name, x.arch.name, x.pkgbase)
    grouped = groupby_preserve_order(packages, same_pkgbase_key)
    signoff_groups = []
    for group in grouped:
        signoff_group = PackageSignoffGroup(group)
        signoff_group.target_repo = pkgtorepo.get(signoff_group.pkgbase,
                                                  "Unknown")
        signoff_group.find_signoffs(signoffs)
        signoff_group.find_specification(specs)
        signoff_groups.append(signoff_group)

    return signoff_groups
archlinux/archweb
[ 270, 117, 270, 77, 1370983197 ]
def default(self, obj):
    if hasattr(obj, '__iter__'):
        # mainly for queryset serialization
        return list(obj)
    if isinstance(obj, Package):
        data = {attr: getattr(obj, attr) for attr in self.pkg_attributes}
        for attr in self.pkg_list_attributes:
            data[attr] = getattr(obj, attr).all()
        all_deps = obj.depends.all()
        for (deptype, name) in DEPENDENCY_TYPES:
            data[name] = all_deps.filter(deptype=deptype)
        return data
    if isinstance(obj, PackageFile):
        filename = obj.filename or ''
        return obj.directory + filename
    if isinstance(obj, (Repo, Arch)):
        return obj.name.lower()
    if isinstance(obj, (PackageGroup, License)):
        return obj.name
    if isinstance(obj, (Depend, Conflict, Provision, Replacement)):
        return str(obj)
    elif isinstance(obj, User):
        return obj.username
    elif isinstance(obj, TodolistPackage):
        data = self.default(obj.pkg)
        for attr in self.todolistpackage_attributes:
            data[attr] = getattr(obj, attr)
        return data
    return super(PackageJSONEncoder, self).default(obj)
archlinux/archweb
[ 270, 117, 270, 77, 1370983197 ]
def __init__(self, id):
    self.id = id
    self.taxable = 1
    self.price = simple_items[id][2]
    self.name = simple_items[id][0]
    self.label = simple_items[id][1]
    self.taxable = True
    try:
        self.taxable = simple_items[id][3]["taxable"]
    except:
        pass
johm/infoshopkeeper
[ 8, 3, 8, 3, 1323614202 ]
def getPrice(self): return self.price
johm/infoshopkeeper
[ 8, 3, 8, 3, 1323614202 ]
def getName(self): return self.name
johm/infoshopkeeper
[ 8, 3, 8, 3, 1323614202 ]
def getLabel(self): return self.label
johm/infoshopkeeper
[ 8, 3, 8, 3, 1323614202 ]
def addToOrder(self): return 0
johm/infoshopkeeper
[ 8, 3, 8, 3, 1323614202 ]
def removeFromOrder(self): return 0
johm/infoshopkeeper
[ 8, 3, 8, 3, 1323614202 ]
def finalizeOrder(self, cursor, cashier, paid_how):
    cursor.execute("""
        INSERT INTO transactionLog
        SET action = "SALE",
        amount = %s,
        cashier = %s,
        date = NOW(),
        info = %s,
johm/infoshopkeeper
[ 8, 3, 8, 3, 1323614202 ]
def getUVs(object, particle_system):
    """ returns a numpy-array of uv - coordinates for a given particle-system
    on a given object """
    locations = [p.location for p in object.particle_systems[particle_system].particles]
    uvs = [pam.map3dPointToUV(object, object, loc) for loc in locations]
    return np.array(uvs)
MartinPyka/Parametric-Anatomical-Modeling
[ 11, 6, 11, 5, 1432640367 ]
def export_connections(filepath):
    """Export connection and distance-informations

    :param str filepath: export filename

    .. note::
        * cmatrices: list of connection matrices
        * dmatrices: list of distance matrices
        * nglist: list of neural groups
        * connection_list: list of layer-based connections

    """
    cmatrices = []
    dmatrices = []
    for c in model.CONNECTION_RESULTS:
        cmatrices.append(c['c'])
        dmatrices.append(c['d'])

    mapping_names = get_mapping_names()

    with zipfile.ZipFile(filepath, 'w', zipfile.ZIP_DEFLATED) as file:
        csv_write_matrices(file, "c", cmatrices)
        csv_write_matrices(file, "d", dmatrices)
        csv_write_matrix(file, "names", mapping_names)
        csv_write_matrix(file, "connections", model.MODEL.connection_indices)
        csv_write_matrix(file, "neurongroups", model.MODEL.ng_list)
MartinPyka/Parametric-Anatomical-Modeling
[ 11, 6, 11, 5, 1432640367 ]
def export_UVfactors(filepath, uv_matrices, layer_names):
    """Export UV-matrices, including the length of a real edge and its UV-distance

    :param str filepath: export filename
    :param numpy.Array uv_matrices:
    :param list layer_names:

    .. note::
        * uv_matrices: list of uv-matrices
        * layer_names: list of layer-names, the order corresponds to the
          list-order in uv_matrices

    """
    with zipfile.ZipFile(filepath, 'w', zipfile.ZIP_DEFLATED) as file:
        for i, matrix in enumerate(uv_matrices):
            csv_write_matrix(file, layer_names[i], [matrix])
MartinPyka/Parametric-Anatomical-Modeling
[ 11, 6, 11, 5, 1432640367 ]
def csv_write_matrices(file, suffix, matrices):
    """Write matrices to csv file

    :param file file: open file
    :param str suffix: suffix for filename
    :param list matrices: a list of matrices

    """
    for i, matrix in enumerate(matrices):
        output = io.StringIO()
        writer = csv.writer(
            output,
            delimiter=";",
            quoting=csv.QUOTE_NONNUMERIC
        )
        for row in matrix:
            writer.writerow(row)
        file.writestr("%i_%s.csv" % (i, suffix), output.getvalue())
MartinPyka/Parametric-Anatomical-Modeling
[ 11, 6, 11, 5, 1432640367 ]
def poll(cls, context): return any(model.MODEL.connections)
MartinPyka/Parametric-Anatomical-Modeling
[ 11, 6, 11, 5, 1432640367 ]
def register():
    """Call upon module register"""
    bpy.utils.register_class(PAMModelExportCSV)
MartinPyka/Parametric-Anatomical-Modeling
[ 11, 6, 11, 5, 1432640367 ]
def main():
    """Main thresholdmon program"""
    parser = make_option_parser()
    (_options, _args) = parser.parse_args()

    init_generic_logging(
        logfile=LOG_FILE,
        stderr=False,
        stdout=True,
        read_config=True,
    )
    django.setup()
    scan()
UNINETT/nav
[ 131, 35, 131, 187, 1484647509 ]
def scan():
    """Scans for threshold rules and evaluates them"""
    rules = ThresholdRule.objects.all()
    alerts = get_unresolved_threshold_alerts()

    _logger.info("evaluating %d rules", len(rules))
    for rule in rules:
        evaluate_rule(rule, alerts)
    _logger.info("done")
UNINETT/nav
[ 131, 35, 131, 187, 1484647509 ]
def evaluate_rule(rule, alerts):
    """
    Evaluates the current status of a single rule and posts events if
    necessary.
    """
    _logger.debug("evaluating rule %r", rule)
    evaluator = rule.get_evaluator()
    try:
        if not evaluator.get_values():
            _logger.warning(
                "did not find any matching values for rule %r %s",
                rule.target, rule.alert
            )
    except Exception:
        _logger.exception(
            "Unhandled exception while getting values for rule: %r", rule
        )
        return

    # post new exceed events
    try:
        exceeded = evaluator.evaluate(rule.alert)
    except Exception:
        _logger.exception("Unhandled exception while evaluating rule alert: %r", rule)
        return

    for metric, value in exceeded:
        alert = alerts.get(rule.id, {}).get(metric, None)
        _logger.info("%s: %s %s (=%s)",
                     "old" if alert else "new", metric, rule.alert, value)
        if not alert:
            start_event(rule, metric, value)

    # try to clear any existing threshold alerts
    if rule.id in alerts:
        clearable = alerts[rule.id]
        try:
            if rule.clear:
                cleared = evaluator.evaluate(rule.clear)
            else:
                cleared = evaluator.evaluate(rule.alert, invert=True)
        except Exception:
            _logger.exception(
                "Unhandled exception while evaluating rule clear: %r", rule)
            return

        for metric, value in cleared:
            if metric in clearable:
                _logger.info("cleared: %s %s (=%s)", metric, rule.clear, value)
                end_event(rule, metric, value)
UNINETT/nav
[ 131, 35, 131, 187, 1484647509 ]
def start_event(rule, metric, value):
    """Makes and posts a threshold start event"""
    event = make_event(True, rule, metric, value)
    _logger.debug("posted start event: %r", event)
    return event
UNINETT/nav
[ 131, 35, 131, 187, 1484647509 ]
def make_event(start, rule, metric, value):
    """Makes and posts a threshold event"""
    event = _event_template()
    event.state = event.STATE_START if start else event.STATE_END
    event.subid = "{rule}:{metric}".format(rule=rule.id, metric=metric)

    varmap = dict(metric=metric,
                  alert=rule.alert,
                  ruleid=six.text_type(rule.id),
                  measured_value=six.text_type(value))
    if rule.clear:
        varmap['clear'] = six.text_type(rule.clear)
    _add_subject_details(event, metric, varmap)

    event.save()
    if varmap:
        event.varmap = varmap
    return event
UNINETT/nav
[ 131, 35, 131, 187, 1484647509 ]
def _event_template():
    event = Event()
    event.source_id = 'thresholdMon'
    event.target_id = 'eventEngine'
    event.event_type_id = 'thresholdState'
    return event
UNINETT/nav
[ 131, 35, 131, 187, 1484647509 ]
def __init__(self, conn=None, object_path=None, bus_name=None):
    super(BaseFacts, self).__init__(conn=conn, object_path=object_path, bus_name=bus_name)

    # Default is an empty FactsCollector
    self.facts_collector = self.facts_collector_class()
candlepin/subscription-manager
[ 59, 108, 59, 16, 1337271210 ]
def GetFacts(self, sender=None):
    collection = self.facts_collector.collect()
    cleaned = dict([(str(key), str(value)) for key, value in list(collection.data.items())])
    return dbus.Dictionary(cleaned, signature="ss")
candlepin/subscription-manager
[ 59, 108, 59, 16, 1337271210 ]
def __init__(self, conn=None, object_path=None, bus_name=None):
    super(self.__class__, self).__init__(conn=conn, object_path=object_path, bus_name=bus_name)
    self.facts_collector = facts_collector
candlepin/subscription-manager
[ 59, 108, 59, 16, 1337271210 ]
def check_origin(self, origin):
    """
    Prevents CORS attacks.

    Args:
        origin: HTTP "Origin" header. URL of initiator of the request.

    Returns:
        True if origin is legit, otherwise False
    """
    # FIXME: implement CORS checking
    return True
zetaops/zengine
[ 82, 22, 82, 36, 1424353885 ]
def open(self):
    """
    Called on new websocket connection.
    """
    sess_id = self._get_sess_id()
    if sess_id:
        self.application.pc.websockets[self._get_sess_id()] = self
        self.write_message(json.dumps({"cmd": "status", "status": "open"}))
    else:
        self.write_message(json.dumps({"cmd": "error", "error": "Please login", "code": 401}))
zetaops/zengine
[ 82, 22, 82, 36, 1424353885 ]
def on_close(self):
    """
    remove connection from pool on connection close.
    """
    self.application.pc.unregister_websocket(self._get_sess_id())
zetaops/zengine
[ 82, 22, 82, 36, 1424353885 ]
def _handle_headers(self):
    """
    Do response processing
    """
    origin = self.request.headers.get('Origin')
    if not settings.DEBUG:
        if origin in settings.ALLOWED_ORIGINS or not origin:
            self.set_header('Access-Control-Allow-Origin', origin)
        else:
            log.debug("CORS ERROR: %s not allowed, allowed hosts: %s" % (origin, settings.ALLOWED_ORIGINS))
            raise HTTPError(403, "Origin not in ALLOWED_ORIGINS: %s" % origin)
    else:
        self.set_header('Access-Control-Allow-Origin', origin or '*')
    self.set_header('Access-Control-Allow-Credentials', "true")
    self.set_header('Access-Control-Allow-Headers', 'Content-Type')
    self.set_header('Access-Control-Allow-Methods', 'OPTIONS')
    self.set_header('Content-Type', 'application/json')
zetaops/zengine
[ 82, 22, 82, 36, 1424353885 ]
def get(self, view_name):
    """
    only used to display login form

    Args:
        view_name: should be "login"
    """
    self.post(view_name)
zetaops/zengine
[ 82, 22, 82, 36, 1424353885 ]
def post(self, view_name):
    """
    login handler
    """
    sess_id = None
    input_data = {}
    # try:
    self._handle_headers()

    # handle input
    input_data = json_decode(self.request.body) if self.request.body else {}
    input_data['path'] = view_name

    # set or get session cookie
    if not self.get_cookie(COOKIE_NAME) or 'username' in input_data:
        sess_id = uuid4().hex
        self.set_cookie(COOKIE_NAME, sess_id)  # , domain='127.0.0.1'
    else:
        sess_id = self.get_cookie(COOKIE_NAME)
    # h_sess_id = "HTTP_%s" % sess_id
    input_data = {'data': input_data,
                  '_zops_remote_ip': self.request.remote_ip}
    log.info("New Request for %s: %s" % (sess_id, input_data))

    self.application.pc.register_websocket(sess_id, self)
    self.application.pc.redirect_incoming_message(sess_id, json_encode(input_data), self.request)
zetaops/zengine
[ 82, 22, 82, 36, 1424353885 ]
def runserver(host=None, port=None):
    """
    Run Tornado server
    """
    host = host or os.getenv('HTTP_HOST', '0.0.0.0')
    port = port or os.getenv('HTTP_PORT', '9001')
    zioloop = ioloop.IOLoop.instance()

    # setup pika client:
    pc = QueueManager(zioloop)
    app.pc = pc
    pc.connect()

    app.listen(port, host)
    zioloop.start()
zetaops/zengine
[ 82, 22, 82, 36, 1424353885 ]
def _valida_codice_fiscale(codice_fiscale):
    """
    Extended validator that checks that the codice fiscale is valid.
    If the codice fiscale is temporary (11 numeric digits), it is considered valid.

    :param codice_fiscale: The codice fiscale.
    :return: None. Raises an exception if validation fails.
    """
    try:
        codicefiscale.validate(codice_fiscale)
    except:
        if re.search("^[0-9]{11}$", codice_fiscale) is None:
            raise
CroceRossaItaliana/jorvik
[ 29, 20, 29, 131, 1424890427 ]
def ottieni_genere_da_codice_fiscale(codice_fiscale, default=None):
    try:
        return codicefiscale.get_gender(codice_fiscale)
    except:
        return default
CroceRossaItaliana/jorvik
[ 29, 20, 29, 131, 1424890427 ]
def _validatore(fieldfile_obj):
    filesize = fieldfile_obj.file.size
    megabyte_limit = mb
    if filesize > megabyte_limit * 1024 * 1024:
        raise ValidationError("Seleziona un file più piccolo di %sMB" % str(megabyte_limit))
CroceRossaItaliana/jorvik
[ 29, 20, 29, 131, 1424890427 ]
def valida_partita_iva(partita_iva):
    try:
        return stdnum.it.iva.validate(partita_iva)
    except:
        raise ValidationError("Partita IVA non corretta.")
CroceRossaItaliana/jorvik
[ 29, 20, 29, 131, 1424890427 ]
def valida_dimensione_file_5mb(fieldfile_obj):
    filesize = fieldfile_obj.file.size
    megabyte_limit = 5
    if filesize > megabyte_limit * 1024 * 1024:
        raise ValidationError("Seleziona un file più piccolo di %sMB" % str(megabyte_limit))
CroceRossaItaliana/jorvik
[ 29, 20, 29, 131, 1424890427 ]
def valida_dimensione_file_8mb(fieldfile_obj):
    filesize = fieldfile_obj.file.size
    megabyte_limit = 8
    if filesize > megabyte_limit * 1024 * 1024:
        raise ValidationError("Seleziona un file più piccolo di %sMB" % str(megabyte_limit))
CroceRossaItaliana/jorvik
[ 29, 20, 29, 131, 1424890427 ]
def valida_email_personale(email):
    coppie = (
        ('cl.', '@cri.it'),
        ('cp.', '@cri.it'),
        ('cr.', '@cri.it'),
    )
    for coppia in coppie:
        if email and email.lower().startswith(coppia[0]) and email.lower().endswith(coppia[1]):
            raise ValidationError("Non è possibile utilizzare una casella istituzionale come "
                                  "indirizzo e-mail personale.")
CroceRossaItaliana/jorvik
[ 29, 20, 29, 131, 1424890427 ]
def __init__(self, filesize=2): self.filesize = filesize
CroceRossaItaliana/jorvik
[ 29, 20, 29, 131, 1424890427 ]
def get_full_sequence_threaded(worker, current_color, deepness):
    sequence = get_full_sequence(worker, current_color, deepness)
    threading.current_thread().sequence = sequence
pnprog/goreviewpartner
[ 268, 66, 268, 59, 1484372321 ]
def run_analysis(self,current_move):
pnprog/goreviewpartner
[ 268, 66, 268, 59, 1484372321 ]
def play(self, gtp_color, gtp_move):
    # GnuGo needs to redefine this method to apply it to all its workers
    if gtp_color == 'w':
        self.bot.place_white(gtp_move)
        for worker in self.workers:
            worker.place_white(gtp_move)
    else:
        self.bot.place_black(gtp_move)
        for worker in self.workers:
            worker.place_black(gtp_move)
pnprog/goreviewpartner
[ 268, 66, 268, 59, 1484372321 ]
def undo(self):
    self.bot.undo()
    for worker in self.workers:
        worker.undo()
pnprog/goreviewpartner
[ 268, 66, 268, 59, 1484372321 ]
def terminate_bot(self):
    log("killing gnugo")
    self.gnugo.close()
    log("killing gnugo workers")
    for w in self.workers:
        w.close()
pnprog/goreviewpartner
[ 268, 66, 268, 59, 1484372321 ]
def gnugo_starting_procedure(sgf_g,profile,silentfail=False): return bot_starting_procedure("GnuGo","GNU Go",GnuGo_gtp,sgf_g,profile,silentfail)
pnprog/goreviewpartner
[ 268, 66, 268, 59, 1484372321 ]
def __init__(self, parent, filename, move_range, intervals, variation, komi, profile="slow", existing_variations="remove_everything"):
    RunAnalysisBase.__init__(self, parent, filename, move_range, intervals, variation, komi, profile, existing_variations)
pnprog/goreviewpartner
[ 268, 66, 268, 59, 1484372321 ]
def __init__(self,g,filename,profile="slow"): LiveAnalysisBase.__init__(self,g,filename,profile)
pnprog/goreviewpartner
[ 268, 66, 268, 59, 1484372321 ]
def get_gnugo_initial_influence_black(self):
    self.write("initial_influence black influence_regions")
    one_line = self.readline()
    one_line = one_line.split("= ")[1].strip().replace(" ", " ")
    lines = [one_line]
    for i in range(self.size - 1):
        one_line = self.readline().strip().replace(" ", " ")
        lines.append(one_line)
pnprog/goreviewpartner
[ 268, 66, 268, 59, 1484372321 ]
def get_gnugo_initial_influence_white(self):
    self.write("initial_influence white influence_regions")
    one_line = self.readline()
    one_line = one_line.split("= ")[1].strip().replace(" ", " ")
    lines = [one_line]
    for i in range(self.size - 1):
        one_line = self.readline().strip().replace(" ", " ")
        lines.append(one_line)
pnprog/goreviewpartner
[ 268, 66, 268, 59, 1484372321 ]
def quick_evaluation(self,color): return variation_data_formating["ES"]%self.get_gnugo_estimate_score()
pnprog/goreviewpartner
[ 268, 66, 268, 59, 1484372321 ]
def get_gnugo_estimate_score(self):
    self.write("estimate_score")
    answer = self.readline().strip()
    try:
        return answer[2:]
    except:
        raise GRPException("GRPException in get_gnugo_estimate_score()")
pnprog/goreviewpartner
[ 268, 66, 268, 59, 1484372321 ]
def gnugo_top_moves_black(self):
    self.write("top_moves_black")
    answer = self.readline()[:-1]
    try:
        answer = answer.split(" ")[1:-1]
    except:
        raise GRPException("GRPException in get_gnugo_top_moves_black()")
    answers_list = []
    for value in answer:
        try:
            float(value)
        except:
            answers_list.append(value)
    return answers_list
pnprog/goreviewpartner
[ 268, 66, 268, 59, 1484372321 ]
def get_gnugo_experimental_score(self, color):
    self.write("experimental_score " + color)
    answer = self.readline().strip()
    return answer[2:]
pnprog/goreviewpartner
[ 268, 66, 268, 59, 1484372321 ]
def __init__(self, parent, bot="GnuGo"):
    Frame.__init__(self, parent)
    self.parent = parent
    self.bot = bot
    self.profiles = get_bot_profiles(bot, False)
    profiles_frame = self
pnprog/goreviewpartner
[ 268, 66, 268, 59, 1484372321 ]
def clear_selection(self):
    self.index = -1
    self.profile.set("")
    self.command.set("")
    self.parameters.set("")
    self.variations.set("")
    self.deepness.set("")
pnprog/goreviewpartner
[ 268, 66, 268, 59, 1484372321 ]
def add_profile(self):
    profiles = self.profiles
    if self.profile.get() == "":
        return
    data = {"bot": self.bot}
    data["profile"] = self.profile.get()
    data["command"] = self.command.get()
    data["parameters"] = self.parameters.get()
    data["variations"] = self.variations.get()
    data["deepness"] = self.deepness.get()
pnprog/goreviewpartner
[ 268, 66, 268, 59, 1484372321 ]
def modify_profile(self):
    profiles = self.profiles
    if self.profile.get() == "":
        return
pnprog/goreviewpartner
[ 268, 66, 268, 59, 1484372321 ]
def __init__(self, sgf_g, profile):
    BotOpenMove.__init__(self, sgf_g, profile)
    self.name = 'Gnugo'
    self.my_starting_procedure = gnugo_starting_procedure
pnprog/goreviewpartner
[ 268, 66, 268, 59, 1484372321 ]
def test_empty():
    parsed = parsing._parse_requirement(io.StringIO("""
    """))
    assert parsed == {}
PyAr/fades
[ 192, 43, 192, 14, 1416192244 ]
def test_simple_default():
    parsed = parsing._parse_requirement(io.StringIO("""
        foo
    """))
    assert parsed == {REPO_PYPI: get_reqs('foo')}
PyAr/fades
[ 192, 43, 192, 14, 1416192244 ]
def test_version_same():
    parsed = parsing._parse_requirement(io.StringIO("""
        pypi::foo == 3.5
    """))
    assert parsed == {
        REPO_PYPI: get_reqs('foo == 3.5')
    }
PyAr/fades
[ 192, 43, 192, 14, 1416192244 ]
def test_version_different():
    parsed = parsing._parse_requirement(io.StringIO("""
        foo !=3.5
    """))
    assert parsed == {
        REPO_PYPI: get_reqs('foo !=3.5')
    }
PyAr/fades
[ 192, 43, 192, 14, 1416192244 ]
def test_version_greater_two_spaces():
    parsed = parsing._parse_requirement(io.StringIO("""
        foo > 2
    """))
    assert parsed == {
        REPO_PYPI: get_reqs('foo > 2')
    }
PyAr/fades
[ 192, 43, 192, 14, 1416192244 ]
def test_comments():
    parsed = parsing._parse_requirement(io.StringIO("""
        pypi::foo   # some text
        # other text
        bar
    """))
    assert parsed == {
        REPO_PYPI: get_reqs('foo') + get_reqs('bar')
    }
PyAr/fades
[ 192, 43, 192, 14, 1416192244 ]
def test_vcs_simple():
    parsed = parsing._parse_requirement(io.StringIO("""
        vcs::strangeurl
    """))
    assert parsed == {REPO_VCS: [parsing.VCSDependency("strangeurl")]}
PyAr/fades
[ 192, 43, 192, 14, 1416192244 ]
def main():
    """Start here"""
    parser = make_parser()
    args = parser.parse_args()
    if args.version:
        from lintswitch import __version__
        print(__version__)
        return 0

    log_params = {'level': args.loglevel}
    if args.logfile:
        log_params['filename'] = args.logfile
    logging.basicConfig(**log_params)  # pylint: disable=W0142
    LOG.debug('lintswitch start')

    work_queue = Queue()

    check_proc = Thread(target=worker, args=(work_queue, args))
    check_proc.daemon = True
    check_proc.start()

    server = Thread(target=http_server.http_server, args=(args.httpport,))
    server.daemon = True
    server.start()

    # Listen for connections from vim (or other) plugin
    listener = socket.socket()
    listener.bind(('127.0.0.1', args.lintport))
    listener.listen(10)

    try:
        main_loop(listener, work_queue)
    except KeyboardInterrupt:
        listener.close()
        print('Bye')

    return 0
grahamking/lintswitch
[ 26, 5, 26, 4, 1291506173 ]
def main_loop(listener, work_queue):
    """Wait for connections and process them.

    @param listener: a socket.socket, open and listening.
    """
    while True:
        conn, _ = listener.accept()
        data = conn.makefile().read()
        conn.close()
        work_queue.put(data)
grahamking/lintswitch
[ 26, 5, 26, 4, 1291506173 ]
def find(name):
    """Finds a program on system path."""
    for directory in syspath():
        candidate = os.path.join(directory, name)
        if os.path.exists(candidate):
            return candidate
    return None
grahamking/lintswitch
[ 26, 5, 26, 4, 1291506173 ]
def getDocumentMD5(self): return hashlib.md5(str(self.incoterm)).hexdigest()
stanta/darfchain
[ 10, 15, 10, 2, 1496296948 ]
def get_ethereum_addres(self):
    ethereum_address = self.env['setting.connect'].search([('platforma', '=', 'ethereum')])
    result_ethereum_dic = {}
    if ethereum_address:
        result_ethereum_dic.update({'ethereum_address': ethereum_address[0].ethereum_pk,
                                    'ethereum_interface': ethereum_address[0].ethereum_address,
                                    'address_node': ethereum_address[0].ethereum_node_address})
    return result_ethereum_dic
stanta/darfchain
[ 10, 15, 10, 2, 1496296948 ]
def _gas_for_signature(self):
    ethereum_setting = {}
    if self.get_ethereum_addres()[0].keys() == {}:
        result_of_gas_estimate = 0
    else:
        date_of_synchronization = dt.now()
        ethereum_setting = self.get_ethereum_addres()
        ethereum_setting = ethereum_setting[0]
        web3 = Web3(HTTPProvider(ethereum_setting['address_node']))
        abi_json = ethereum_setting['ethereum_interface']
        ethereum_contract_address = ethereum_setting['ethereum_address']
        contract = web3.eth.contract(abi=json.loads(abi_json), address=ethereum_contract_address)
        hash_of_synchronaze = '"' + base58.b58encode(str(date_of_synchronization)) + '"'
        md5 = self.getDocumentMD5()
        md5_for_solidity = '"' + md5[0] + '"'
        print hash_of_synchronaze
        try:
            result_of_gas_estimate = contract.estimateGas().setDocumentHash(str(hash_of_synchronaze), md5_for_solidity)
        except:
            result_of_gas_estimate = 0
    self.gas_for_signature = result_of_gas_estimate
    return result_of_gas_estimate
stanta/darfchain
[ 10, 15, 10, 2, 1496296948 ]
def _gas_limit(self):
    ethereum_setting = {}
    if self.get_ethereum_addres()[0].keys() == {}:
        result_of_gas_limit = 0
    else:
        ethereum_setting = self.get_ethereum_addres()
        ethereum_setting = ethereum_setting[0]
        web3 = Web3(HTTPProvider(ethereum_setting['address_node']))
        abi_json = ethereum_setting['ethereum_interface']
        ethereum_contract_address = ethereum_setting['ethereum_address']
        contract = web3.eth.contract(abi=json.loads(abi_json), address=ethereum_contract_address)
        result_of_gas_limit = contract.call().getGasLimit()
    self.gas_limit = result_of_gas_limit
    return result_of_gas_limit
stanta/darfchain
[ 10, 15, 10, 2, 1496296948 ]
def signature_action(self):
    ethereum_setting = {}
    date_of_synchronization = dt.now()
    ethereum_setting = {}
    ethereum_setting = self.get_ethereum_addres()
    ethereum_setting = ethereum_setting[0]
    web3 = Web3(HTTPProvider(ethereum_setting['address_node']))
    abi_json = ethereum_setting['ethereum_interface']
    ethereum_contract_address = ethereum_setting['ethereum_address']
    contract = web3.eth.contract(abi=json.loads(abi_json), address=ethereum_contract_address)
    hash_of_synchronaze = '"' + base58.b58encode(str(date_of_synchronization)) + '"'
    print hash_of_synchronaze
    md5 = self.getDocumentMD5()
    md5_for_solidity = '"' + md5[0] + '"'
    TransactionHashEthereum = contract.transact().setDocumentHash(str(hash_of_synchronaze), str(md5_for_solidity))
    self.signature_timestamp = str(date_of_synchronization)
    self.signature_hash = TransactionHashEthereum
    self.signature_status = True
    self.env['journal.signature'].create({'name': self.name,
                                          'checksum': md5[0],
                                          'hash_of_signature': TransactionHashEthereum,
                                          'timestamp_of_document': self.signature_timestamp,
                                          'date_of_signature': date_of_synchronization})
    root = etree.Element("data")
    sale_order_name = etree.SubElement(root, 'name')
    sale_order_name.text = self.name
    sale_order_hash = etree.SubElement(root, 'transaction_hash')
    sale_order_hash.text = TransactionHashEthereum
    sale_order_md5 = etree.SubElement(root, 'md5')
    sale_order_md5.text = md5[0]
    xml_result = etree.tostring(root, pretty_print=False)
    # xml_result = xml_result.replace('"','\\"')
    # -------------------------------------------- write xml to temp file
    file_to_save_with_path = '/tmp/' + self.name + str(date_of_synchronization)
    temp_file = open(file_to_save_with_path, 'w')
    temp_file.write(xml_result)
    temp_file.close()
    string = '/usr/bin/putbigchaindb.py --xml="' + file_to_save_with_path + '"'
    os.system(string)
stanta/darfchain
[ 10, 15, 10, 2, 1496296948 ]
def check_signature_action(self):
    date_of_synchronization = dt.now()
    ethereum_setting = self.get_ethereum_addres()
    ethereum_setting = ethereum_setting[0]
    web3 = Web3(HTTPProvider(ethereum_setting['address_node']))
    abi_json = ethereum_setting['ethereum_interface']
    ethereum_contract_address = ethereum_setting['ethereum_address']
    contract = web3.eth.contract(abi=json.loads(abi_json), address=ethereum_contract_address)
    get_transact = web3.eth.getTransaction(self.signature_hash)
    timestamp = str(contract.call(get_transact).getDocumentHash().replace('"', ''))
    md5 = self.getDocumentMD5()
    md5_from_contract = contract.call(get_transact).getDocumentMD5()
    if str(md5_from_contract).replace('"', '') == md5[0]:
        self.result_of_check = 'OK'
    else:
        self.result_of_check = 'Error Checksum'
stanta/darfchain
[ 10, 15, 10, 2, 1496296948 ]
def fetch_production(zone_key='NL', session=None, target_datetime=None,
                     logger=logging.getLogger(__name__), energieopwek_nl=True):
    if target_datetime is None:
        target_datetime = arrow.utcnow()
    else:
        target_datetime = arrow.get(target_datetime)

    r = session or requests.session()

    consumptions = ENTSOE.fetch_consumption(zone_key=zone_key, session=r,
                                            target_datetime=target_datetime,
                                            logger=logger)
    if not consumptions:
        return
    for c in consumptions:
        del c['source']
    df_consumptions = pd.DataFrame.from_dict(consumptions).set_index('datetime')

    # NL has exchanges with BE, DE, NO, GB, DK-DK1
    exchanges = []
    for exchange_key in ['BE', 'DE', 'GB']:
        zone_1, zone_2 = sorted([exchange_key, zone_key])
        exchange = ENTSOE.fetch_exchange(zone_key1=zone_1, zone_key2=zone_2,
                                         session=r, target_datetime=target_datetime,
                                         logger=logger)
        if not exchange:
            return
        exchanges.extend(exchange or [])

    # add NO data, fetch once for every hour
    # This introduces an error, because it doesn't use the average power flow
    # during the hour, but rather only the value during the first minute of the
    # hour!
    zone_1, zone_2 = sorted(['NO', zone_key])
    exchange_NO = [statnett.fetch_exchange(zone_key1=zone_1, zone_key2=zone_2,
                                           session=r, target_datetime=dt.datetime,
                                           logger=logger)
                   for dt in arrow.Arrow.range(
                       'hour',
                       arrow.get(min([e['datetime'] for e in exchanges])).replace(minute=0),
                       arrow.get(max([e['datetime'] for e in exchanges])).replace(minute=0))]
    exchanges.extend(exchange_NO)

    # add DK1 data (only for dates after operation)
    if target_datetime > arrow.get('2019-08-24', 'YYYY-MM-DD'):
        zone_1, zone_2 = sorted(['DK-DK1', zone_key])
        df_dk = pd.DataFrame(DK.fetch_exchange(zone_key1=zone_1, zone_key2=zone_2,
                                               session=r, target_datetime=target_datetime,
                                               logger=logger))
        # Because other exchanges and consumption data is only available per hour
        # we floor the timestamp to hour and group by hour with averaging of netFlow
        df_dk['datetime'] = df_dk['datetime'].dt.floor('H')
        exchange_DK = df_dk.groupby(['datetime']).aggregate({'netFlow': 'mean',
                                                             'sortedZoneKeys': 'max',
                                                             'source': 'max'}).reset_index()
        # because averaging with high precision numbers leads to rounding errors
        exchange_DK = exchange_DK.round({'netFlow': 3})
        exchanges.extend(exchange_DK.to_dict(orient='records'))

    # We want to know the net-imports into NL, so if NL is in zone_1 we need
    # to flip the direction of the flow. E.g. 100MW for NL->DE means 100MW
    # export to DE and needs to become -100MW for import to NL.
    for e in exchanges:
        if e['sortedZoneKeys'].startswith('NL->'):
            e['NL_import'] = -1 * e['netFlow']
        else:
            e['NL_import'] = e['netFlow']
        del e['source']
        del e['netFlow']

    df_exchanges = pd.DataFrame.from_dict(exchanges).set_index('datetime')
    # Sum all exchanges to NL imports
    df_exchanges = df_exchanges.groupby('datetime').sum()

    # Fill missing values by propagating the value forward
    df_consumptions_with_exchanges = df_consumptions.join(df_exchanges).fillna(
        method='ffill', limit=3)  # Limit to 3 x 15min

    # Load = Generation + netImports
    # => Generation = Load - netImports
    df_total_generations = (df_consumptions_with_exchanges['consumption']
                            - df_consumptions_with_exchanges['NL_import'])

    # Fetch all production
    # The energieopwek_nl parser is backwards compatible with ENTSOE parser.
    # Because of data quality issues we switch to using energieopwek, but if
    # data quality of ENTSOE improves we can switch back to using a single
    # source.
    productions_ENTSOE = ENTSOE.fetch_production(zone_key=zone_key, session=r,
                                                 target_datetime=target_datetime,
                                                 logger=logger)
    if energieopwek_nl:
        productions_eopwek = fetch_production_energieopwek_nl(session=r,
                                                              target_datetime=target_datetime,
                                                              logger=logger)
        # For every production value we look up the corresponding ENTSOE
        # values and copy the nuclear, gas, coal, biomass and unknown production.
        productions = []
        for p in productions_eopwek:
            entsoe_value = next((pe for pe in productions_ENTSOE
                                 if pe["datetime"] == p["datetime"]), None)
            if entsoe_value:
                p["production"]["nuclear"] = entsoe_value["production"]["nuclear"]
                p["production"]["gas"] = entsoe_value["production"]["gas"]
                p["production"]["coal"] = entsoe_value["production"]["coal"]
                p["production"]["biomass"] = entsoe_value["production"]["biomass"]
                p["production"]["unknown"] = entsoe_value["production"]["unknown"]
                productions.append(p)
    else:
        productions = productions_ENTSOE
    if not productions:
        return

    # Flatten production dictionaries (we ignore storage)
    for p in productions:
        # if for some reason there's no unknown value
        if not 'unknown' in p['production'] or p['production']['unknown'] == None:
            p['production']['unknown'] = 0
corradio/electricitymap
[ 2764, 774, 2764, 221, 1463848577 ]
def fetch_production_energieopwek_nl(session=None, target_datetime=None,
                                     logger=logging.getLogger(__name__)):
    if target_datetime is None:
        target_datetime = arrow.utcnow()

    # Get production values for target and target-1 day
    df_current = get_production_data_energieopwek(
        target_datetime, session=session)
    df_previous = get_production_data_energieopwek(
        target_datetime.shift(days=-1), session=session)

    # Concat them, oldest first to keep chronological order intact
    df = pd.concat([df_previous, df_current])

    output = []
    base_time = arrow.get(target_datetime.date(), 'Europe/Paris').shift(days=-1).to('utc')
    for i, prod in enumerate(df.to_dict(orient='records')):
        output.append(
            {
                'zoneKey': 'NL',
                'datetime': base_time.shift(minutes=i * 15).datetime,
                'production': prod,
                'source': 'energieopwek.nl, entsoe.eu'
            }
        )
    return output
corradio/electricitymap
[ 2764, 774, 2764, 221, 1463848577 ]
def _fromUtf8(s): return s
Alexsays/Kindle-Sync
[ 2, 1, 2, 1, 1395888753 ]
def _translate(context, text, disambig): return QtGui.QApplication.translate(context, text, disambig, _encoding)
Alexsays/Kindle-Sync
[ 2, 1, 2, 1, 1395888753 ]
def _translate(context, text, disambig): return QtGui.QApplication.translate(context, text, disambig)
Alexsays/Kindle-Sync
[ 2, 1, 2, 1, 1395888753 ]
def __init__(self, parent=None, flags=QtCore.Qt.Dialog):
    super(Ui_SearchDialog, self).__init__(parent, flags)
    self.setupUi(self)
Alexsays/Kindle-Sync
[ 2, 1, 2, 1, 1395888753 ]
def closeClicked(self):
    print "clicked"
    self.closeSignal.emit()
Alexsays/Kindle-Sync
[ 2, 1, 2, 1, 1395888753 ]
def main():
    repos_file, beaker_file = parse_args()
    repos = load_secret_data(repos_file)
    inject_repos(repos, beaker_file)
oVirt/jenkins
[ 16, 9, 16, 6, 1366658775 ]