Dataset columns: function (string, lengths 11-56k), repo_name (string, lengths 5-60), features (sequence)
def __init__(self):
    '''initialize configuration'''
    # mode of operation
    self.mode = None

    # mapping of targets to lists of backends to use when backing up / restoring them
    self.target_backends = {}

    # mapping of targets to lists of entities to include when backing up
    self.target_includes = {}

    # mapping of targets to lists of entities to exclude when backing up
    self.target_excludes = {}

    # output log level
    # currently supports 'quiet', 'normal', 'verbose', 'debug'
    self.log_level = 'normal'

    # output format to backup / restore
    self.outputformat = 'snapfile'

    # location of the snapfile to backup to / restore from
    self.snapfile = None

    # Encryption/decryption password to use, if left as None, encryption will be disabled
    self.encryption_password = None
movitto/snap
[ 22, 2, 22, 6, 1320352462 ]
def log_level_at_least(self, comparison):
    return (comparison == 'quiet') or \
           (comparison == 'normal' and self.log_level != 'quiet') or \
           (comparison == 'verbose' and (self.log_level == 'verbose' or self.log_level == 'debug')) or \
           (comparison == 'debug' and self.log_level == 'debug')
movitto/snap
[ 22, 2, 22, 6, 1320352462 ]
def __init__(self, config_file):
    '''
    Initialize the config file, specifying its path

    @param config_file - the path to the file to load
    '''
    # if config file doesn't exist, just ignore
    if not os.path.exists(config_file):
        if snap.config.options.log_level_at_least("verbose"):
            snap.callback.snapcallback.warn("Config file " + config_file + " not found")
    else:
        self.parser = ConfigParser.ConfigParser()
        self.parser.read(config_file)
        self.__parse()
movitto/snap
[ 22, 2, 22, 6, 1320352462 ]
def string_to_array(string):
    '''Static helper to convert a colon-delimited string to an array of strings'''
    return string.split(':')
movitto/snap
[ 22, 2, 22, 6, 1320352462 ]
def __get_bool(self, key, section='main'):
    '''
    Retrieve the indicated boolean value from the config file

    @param key - the string key corresponding to the boolean value to retrieve
    @param section - the section to retrieve the value from
    @returns - the value or None if not found
    '''
    try:
        return ConfigFile.string_to_bool(self.parser.get(section, key))
    except:
        return None
movitto/snap
[ 22, 2, 22, 6, 1320352462 ]
def __get_array(self, section='main'):
    '''return array of key/value pairs from the config file section
movitto/snap
[ 22, 2, 22, 6, 1320352462 ]
def __parse(self):
    '''parse configuration out of the config file'''
    for backend in SnapshotTarget.BACKENDS:
        val = self.__get_bool(backend)
        if val is not None:
            snap.config.options.target_backends[backend] = val
        else:
            val = self.__get_string(backend)
            if val:
                snap.config.options.target_backends[backend] = True
                val = ConfigFile.string_to_array(val)
                for include in val:
                    if include[0] == '!':
                        snap.config.options.target_excludes[backend].append(include[1:])
                    else:
                        snap.config.options.target_includes[backend].append(include)
            else:
                val = self.__get_bool('no' + backend)
                if val:
                    snap.config.options.target_backends[backend] = False

    of = self.__get_string('outputformat')
    sf = self.__get_string('snapfile')
    ll = self.__get_string('loglevel')
    enp = self.__get_string('encryption_password')
movitto/snap
[ 22, 2, 22, 6, 1320352462 ]
def read_config(self):
    # add conf stored in resources if running from local checkout
    CONFIG_FILES.append(os.path.join(os.path.dirname(__file__), "..", "resources", "snap.conf"))
    for config_file in CONFIG_FILES:
        ConfigFile(config_file)
movitto/snap
[ 22, 2, 22, 6, 1320352462 ]
def verify_integrity(self):
    '''
    verify the integrity of the current option set

    @raises - ArgError if the options are invalid
    '''
    if snap.config.options.mode == None:
        # mode not specified
        raise snap.exceptions.ArgError("Must specify backup or restore")
    if snap.config.options.snapfile == None:
        # need to specify snapfile location
        raise snap.exceptions.ArgError("Must specify snapfile")
    # TODO verify output format is one of permitted types
    if snap.config.options.outputformat == None:
        # need to specify output format
        raise snap.exceptions.ArgError("Must specify valid output format")
movitto/snap
[ 22, 2, 22, 6, 1320352462 ]
def __init__(self, engine, txid, msg, callback=None, max_duration=5000, max_concurrency=3):
    self.engine = engine
    self.callback = callback
    self.machine = Machine(model=self, states=self.states,
                           transitions=self.transitions, initial='initialised')
    self.start = self.last_change = time.time() * 1000
    self.max_duration = max_duration
    self.max_concurrency = max_concurrency
    self.txid = txid
    self.times = {}
    self.parse_message(msg)
    self.query()
shendo/peerz
[ 8, 3, 8, 2, 1402811281 ]
def query(self): pass
shendo/peerz
[ 8, 3, 8, 2, 1402811281 ]
def is_complete(self): return self.state in ['complete', 'timedout']
shendo/peerz
[ 8, 3, 8, 2, 1402811281 ]
def pack_request(self): return None
shendo/peerz
[ 8, 3, 8, 2, 1402811281 ]
def unpack_response(content): return None
shendo/peerz
[ 8, 3, 8, 2, 1402811281 ]
def pack_response(content): return None
shendo/peerz
[ 8, 3, 8, 2, 1402811281 ]
def duration(self): return time.time() * 1000 - self.start
shendo/peerz
[ 8, 3, 8, 2, 1402811281 ]
def _send_query(self): pass
shendo/peerz
[ 8, 3, 8, 2, 1402811281 ]
def accept_upload(conf, dud, event_emitter):
    '''
    Accept the upload and move its data to the right places.
    '''
    job_success = dud.get('X-Spark-Success') == 'Yes'
    job_id = dud.get('X-Spark-Job')

    # mark job as accepted and done
    with session_scope() as session:
        job = session.query(Job).filter(Job.uuid == job_id).one_or_none()
        if not job:
            log.error('Unable to mark job \'{}\' as done: The Job was not found.'.format(job_id))
            # this is a weird situation, there is no proper way to handle it as this indicates a bug
            # in the Laniakea setup or some other oddity.
            # The least harmful thing to do is to just leave the upload alone and try again later.
            return

        job.result = JobResult.SUCCESS if job_success else JobResult.FAILURE
        job.latest_log_excerpt = None

        # move the log file and Firehose reports to the log storage
        log_target_dir = os.path.join(conf.log_storage_dir, get_dir_shorthand_for_uuid(job_id))
        firehose_target_dir = os.path.join(log_target_dir, 'firehose')
        for fname in dud.get_files():
            if fname.endswith('.log'):
                os.makedirs(log_target_dir, exist_ok=True)

                # move the logfile to its destination and ensure it is named correctly
                target_fname = os.path.join(log_target_dir, job_id + '.log')
                safe_rename(fname, target_fname)
            elif fname.endswith('.firehose.xml'):
                os.makedirs(firehose_target_dir, exist_ok=True)

                # move the firehose report to its own directory and rename it
                fh_target_fname = os.path.join(firehose_target_dir, job_id + '.firehose.xml')
                safe_rename(fname, fh_target_fname)

        # handle different job data
        if job.module == LkModule.ISOTOPE:
            from .import_isotope import handle_isotope_upload
            handle_isotope_upload(session,
                                  success=job_success,
                                  conf=conf,
                                  dud=dud,
                                  job=job,
                                  event_emitter=event_emitter)
        elif job.kind == JobKind.PACKAGE_BUILD:
            # the package has been imported by Dak, so we just announce this
            # event to the world
            spkg = session.query(SourcePackage) \
                .filter(SourcePackage.source_uuid == job.trigger) \
                .filter(SourcePackage.version == job.version) \
                .one_or_none()
            if spkg:
                suite_target_name = '?'
                if job.data:
                    suite_target_name = job.data.get('suite', '?')

                event_data = {'pkgname': spkg.name,
                              'version': job.version,
                              'architecture': job.architecture,
                              'suite': suite_target_name,
                              'job_id': job_id}
                if job_success:
                    event_emitter.submit_event_for_mod(LkModule.ARCHIVE, 'package-build-success', event_data)
                else:
                    event_emitter.submit_event_for_mod(LkModule.ARCHIVE, 'package-build-failed', event_data)
        else:
            event_emitter.submit_event('upload-accepted', {'job_id': job_id, 'job_failed': not job_success})

    # remove the upload description file from incoming
    os.remove(dud.get_dud_file())
    log.info("Upload {} accepted.", dud.get_filename())
lkorigin/laniakea
[ 20, 8, 20, 1, 1465596960 ]
def import_files_from(conf, incoming_dir):
    '''
    Import files from an untrusted incoming source.

    IMPORTANT: We assume that the uploader can not edit their files post-upload.
    If they could, we would be vulnerable to timing attacks here.
    '''
    emitter = EventEmitter(LkModule.RUBICON)
    for dud_file in glob(os.path.join(incoming_dir, '*.dud')):
        dud = Dud(dud_file)

        try:
            dud.validate(keyrings=conf.trusted_gpg_keyrings)
        except Exception as e:
            reason = 'Signature validation failed: {}'.format(str(e))
            reject_upload(conf, dud, reason, emitter)
            continue

        # if we are here, the file is good to go
        accept_upload(conf, dud, emitter)
lkorigin/laniakea
[ 20, 8, 20, 1, 1465596960 ]
def ready(self): super(PeopleAppConfig, self).ready()
kartoza/geonode
[ 6, 17, 6, 113, 1439813567 ]
def choose(n, k):
    if k > n:
        return 0
    elif n < MOD:
        return (fact[n]/fact[n-k]/fact[k])%MOD
    else:
        prod = 1
        while n > 0:
            prod *= choose(n%MOD, k%MOD)
            prod %= MOD
            n /= MOD
            k /= MOD
        return prod
KirarinSnow/Google-Code-Jam
[ 84, 38, 84, 1, 1276377660 ]
def set_assets_node(self, assets):
    if not isinstance(assets, list):
        assets = [assets]
    node_id = self.request.query_params.get('node_id')
    if not node_id:
        return
    node = get_object_or_none(Node, pk=node_id)
    if not node:
        return
    node.assets.add(*assets)
jumpserver/jumpserver
[ 19948, 4806, 19948, 134, 1404446099 ]
def get_object(self):
    asset_pk = self.kwargs.get('pk')
    asset = get_object_or_404(Asset, pk=asset_pk)
    return asset.platform
jumpserver/jumpserver
[ 19948, 4806, 19948, 134, 1404446099 ]
def check_object_permissions(self, request, obj):
    if request.method.lower() in ['delete', 'put', 'patch'] and obj.internal:
        self.permission_denied(
            request, message={"detail": "Internal platform"}
        )
    return super().check_object_permissions(request, obj)
jumpserver/jumpserver
[ 19948, 4806, 19948, 134, 1404446099 ]
def perform_assets_task(self, serializer):
    data = serializer.validated_data
    action = data['action']
    assets = data.get('assets', [])
    if action == "refresh":
        task = update_assets_hardware_info_manual.delay(assets)
    else:
        # action == 'test':
        task = test_assets_connectivity_manual.delay(assets)
    return task
jumpserver/jumpserver
[ 19948, 4806, 19948, 134, 1404446099 ]
def set_task_to_serializer_data(self, serializer, task):
    data = getattr(serializer, '_data', {})
    data["task"] = task.id
    setattr(serializer, '_data', data)
jumpserver/jumpserver
[ 19948, 4806, 19948, 134, 1404446099 ]
def create(self, request, *args, **kwargs):
    pk = self.kwargs.get('pk')
    request.data['asset'] = pk
    request.data['assets'] = [pk]
    return super().create(request, *args, **kwargs)
jumpserver/jumpserver
[ 19948, 4806, 19948, 134, 1404446099 ]
def perform_asset_task(self, serializer):
    data = serializer.validated_data
    action = data['action']
    if action not in ['push_system_user', 'test_system_user']:
        return

    asset = data['asset']
    system_users = data.get('system_users')
    if not system_users:
        system_users = asset.get_all_system_users()
    if action == 'push_system_user':
        task = push_system_users_a_asset.delay(system_users, asset=asset)
    elif action == 'test_system_user':
        task = test_system_users_connectivity_a_asset.delay(system_users, asset=asset)
    else:
        task = None
    return task
jumpserver/jumpserver
[ 19948, 4806, 19948, 134, 1404446099 ]
def get_queryset(self):
    asset_id = self.kwargs.get('pk')
    asset = get_object_or_404(Asset, pk=asset_id)
    if not asset.domain:
        return []
    queryset = asset.domain.gateways.filter(protocol='ssh')
    return queryset
jumpserver/jumpserver
[ 19948, 4806, 19948, 134, 1404446099 ]
def get_object(self):
    asset_id = self.kwargs.get('pk')
    asset = get_object_or_404(Asset, pk=asset_id)
    return asset
jumpserver/jumpserver
[ 19948, 4806, 19948, 134, 1404446099 ]
def get_queryset(self):
    perms = self.get_asset_related_perms()
    users = User.objects.filter(
        Q(assetpermissions__in=perms) | Q(groups__assetpermissions__in=perms)
    ).distinct()
    return users
jumpserver/jumpserver
[ 19948, 4806, 19948, 134, 1404446099 ]
def get_queryset(self):
    perms = self.get_asset_related_perms()
    user_groups = UserGroup.objects.filter(assetpermissions__in=perms).distinct()
    return user_groups
jumpserver/jumpserver
[ 19948, 4806, 19948, 134, 1404446099 ]
def get_object(self):
    asset_id = self.kwargs.get('pk')
    asset = get_object_or_404(Asset, pk=asset_id)
    return asset
jumpserver/jumpserver
[ 19948, 4806, 19948, 134, 1404446099 ]
def filter_queryset(self, queryset):
    queryset = super().filter_queryset(queryset)
    queryset = self.filter_asset_related(queryset)
    return queryset
jumpserver/jumpserver
[ 19948, 4806, 19948, 134, 1404446099 ]
def filter_queryset(self, queryset):
    queryset = super().filter_queryset(queryset)
    queryset = self.filter_user_related(queryset)
    queryset = queryset.distinct()
    return queryset
jumpserver/jumpserver
[ 19948, 4806, 19948, 134, 1404446099 ]
def get_perm_user(self):
    user_id = self.kwargs.get('perm_user_id')
    user = get_object_or_404(User, pk=user_id)
    return user
jumpserver/jumpserver
[ 19948, 4806, 19948, 134, 1404446099 ]
def filter_queryset(self, queryset):
    queryset = super().filter_queryset(queryset)
    queryset = self.filter_user_group_related(queryset)
    queryset = queryset.distinct()
    return queryset
jumpserver/jumpserver
[ 19948, 4806, 19948, 134, 1404446099 ]
def main():
    usage = "%prog [options] <song-set> <results-file0> [<results-file1> ...]"
    parser = OptionParser(usage=usage)
    parser.add_option("--popt", "--parser-options", dest="popts", action="append",
                      help="specify options for the parser that interprets the gold standard annotations. Type '--popt help' to get a list of options (we use a DirectedCkyParser)")
    parser.add_option("-m", "--metric", dest="metric", action="store",
                      help="semantics distance metric to use. Use '-m help' for a list of available metrics")
    parser.add_option("--mopt", "--metric-options", dest="mopts", action="append",
                      help="options to pass to the semantics metric. Use with '--mopt help' with -m to see available options")
    parser.add_option("-r", "--print-results", dest="print_results", action="store", default=5, type="int",
                      help="number of top search results to print for each query (parse result). Default: 5. Use -1 to print distances from all songs in the corpus")
    parser.add_option("-g", "--gold-only", dest="gold_only", action="store_true",
                      help="skip results that have no gold standard sequence associated with them (we can't tell which is the right answer for these)")
    parser.add_option("--mc", "--metric-computation", dest="metric_computation", action="store_true",
                      help="output the computation information for the metric between the parse result and each top search result")
    options, arguments = parser.parse_args()
markgw/jazzparser
[ 5, 1, 5, 1, 1368367354 ]
def __init__(
        self,
        ic: IdentityController,
        api_factory: APIFactory,
        start_sub: str = None):
    Gtk.Window.__init__(self, title='Something For Reddit',
                        icon_name='today.sam.reddit-is-gtk')
    self.add_events(Gdk.EventMask.KEY_PRESS_MASK)
    self.set_default_size(600, 600)
    self.set_wmclass("reddit-is-gtk", "Something For Reddit")

    self._ic = ic
    self._ic.token_changed.connect(self._token_changed_cb)
    self._api = None
    self._api_factory = api_factory

    settings = Gtk.Settings.get_default()
    screen = Gdk.Screen.get_default()
    css_provider = Gtk.CssProvider.get_default()
    if settings.props.gtk_application_prefer_dark_theme:
        css_provider.load_from_resource(
            '/today/sam/reddit-is-gtk/style.dark.css')
    else:
        css_provider.load_from_resource(
            '/today/sam/reddit-is-gtk/style.css')
    context = Gtk.StyleContext()
    context.add_provider_for_screen(screen, css_provider,
                                    Gtk.STYLE_PROVIDER_PRIORITY_USER)

    self._paned = Gtk.Paned.new(Gtk.Orientation.HORIZONTAL)
    self.add(self._paned)
    self._paned.show()

    self._webview = FullscreenableWebview()
    self._webview_bin = ProgressContainer(self._webview)
    self._comments = None

    self._stack = Gtk.Stack()
    self._stack.connect('notify::visible-child', self.__stack_child_cb)
    self._paned.add2(self._stack)
    #self._paned.child_set_property(self._stack, 'shrink', True)
    self._stack.show()

    self._sublist_bin = Gtk.Box()
    self._paned.add1(self._sublist_bin)
    self._sublist_bin.show()
    self._sublist = None

    self._make_header()

    left = Gtk.SizeGroup(mode=Gtk.SizeGroupMode.HORIZONTAL)
    left.add_widget(self._left_header)
    left.add_widget(self._sublist_bin)

    self._paned.connect('notify::position',
                        self.__notify_position_cb, self._header_paned)
    self._header_paned.connect('notify::position',
                               self.__notify_position_cb, self._paned)

    self._token_changed_cb(self._ic)
samdroid-apps/something-for-reddit
[ 163, 15, 163, 18, 1459079393 ]
def connect_api(self, api: RedditAPI):
    start_sub = None
    if start_sub is None:
        start_sub = get_settings()['default-sub']

    if self._api is not None:
        # TODO: swap right panel
        print('Swapping', self._api, 'for', api)
        start_sub = self._sublist.get_uri()
        # FIXME: do we need to disconnect the callbacks?
        self._sublist.destroy()
        self._subentry.destroy()

    self._api = api
    self._api.request_failed.connect(self.__request_failed_cb)

    self._sublist = SubList(self._api, start_sub)
    self._sublist.new_other_pane.connect(self.__new_other_pane_cb)
    self._sublist_bin.add(self._sublist)
    #self._paned.child_set_property(self._sublist, 'shrink', True)
    self._sublist.show()

    self._subentry = SubEntry(self._api, start_sub)
    self._subentry.activate.connect(self.__subentry_activate_cb)
    self._subentry.escape_me.connect(self.__subentry_escape_me_cb)
    self._left_header.props.custom_title = self._subentry
    self._subentry.show()
samdroid-apps/something-for-reddit
[ 163, 15, 163, 18, 1459079393 ]
def do_event(self, event):
    if event.type != Gdk.EventType.KEY_PRESS:
        return
    if isinstance(self.get_focus(), (Gtk.TextView, Gtk.Entry)):
        return

    if event.keyval == Gdk.KEY_F6:
        self._subentry.focus()
        return True
    if event.keyval == Gdk.KEY_1:
        self._sublist.focus()
        return True
    if event.keyval == Gdk.KEY_2:
        self._stack.set_visible_child(self._comments)
        self._comments.focus()
        return True
    if event.keyval == Gdk.KEY_3:
        self._stack.set_visible_child(self._webview_bin)
        self._webview.grab_focus()
        return True

    if event.state & Gdk.ModifierType.MOD1_MASK:
        if event.keyval == Gdk.KEY_Left:
            self._webview.go_back()
            return True
        if event.keyval == Gdk.KEY_Right:
            self._webview.go_forward()
            return True
samdroid-apps/something-for-reddit
[ 163, 15, 163, 18, 1459079393 ]
def load_uri_from_label(self, uri):
    is_relative = not uri.startswith('http')
    is_reddit = re.match('https?:\/\/(www\.|np\.)?reddit\.com\/', uri)
    if is_relative or is_reddit:
        self.goto_reddit_uri(uri)
        return

    self._stack.set_visible_child(self._webview_bin)
    self._webview.load_uri(uri)
samdroid-apps/something-for-reddit
[ 163, 15, 163, 18, 1459079393 ]
def _make_header(self):
    self._header_paned = Gtk.Paned()
    self.set_titlebar(self._header_paned)

    self._left_header = Gtk.HeaderBar()
    layout = Gtk.Settings.get_default().props.gtk_decoration_layout
    self._left_header.set_decoration_layout(layout.split(':')[0])
    self._right_header = Gtk.HeaderBar()
    self._right_header.set_decoration_layout(':'+layout.split(':')[1])
    self._right_header.props.show_close_button = True

    self._header_paned.add1(self._left_header)
    self._header_paned.child_set_property(
        self._left_header, 'shrink', False)
    self._header_paned.add2(self._right_header)
    self._header_paned.child_set_property(
        self._right_header, 'shrink', False)
    self._header_paned.show_all()

    self._identity = IdentityButton(self._ic)
    self._right_header.pack_start(self._identity)
    self._identity.show()

    self._stack_switcher = Gtk.StackSwitcher(stack=self._stack)
    self._right_header.pack_end(self._stack_switcher)
    self._stack_switcher.show()

    self._webview_toolbar = WebviewToolbar(self._webview)
    self._right_header.pack_end(self._webview_toolbar)
samdroid-apps/something-for-reddit
[ 163, 15, 163, 18, 1459079393 ]
def get_sublist(self): return self._sublist
samdroid-apps/something-for-reddit
[ 163, 15, 163, 18, 1459079393 ]
def goto_sublist(self, to):
    '''
    Public api for children:

        widget.get_toplevel().goto_sublist('/u/samdroid_/overview')
    '''
    self._sublist.goto(to)
    self._subentry.goto(to)
samdroid-apps/something-for-reddit
[ 163, 15, 163, 18, 1459079393 ]
def __cv_got_post_data_cb(self, cv, post):
    if not post.get('is_self') and 'url' in post:
        self.__new_other_pane_cb(None, post['url'], cv, True)
samdroid-apps/something-for-reddit
[ 163, 15, 163, 18, 1459079393 ]
def __subentry_escape_me_cb(self, entry): self._sublist.focus()
samdroid-apps/something-for-reddit
[ 163, 15, 163, 18, 1459079393 ]
def __init__(self, ic: IdentityController, api_factory: APIFactory):
    Gtk.Application.__init__(self,
                             application_id='today.sam.reddit-is-gtk')
    self.connect('startup', self.__do_startup_cb)
    GLib.set_application_name("Something For Reddit")
    GLib.set_prgname("reddit-is-gtk")
    self._w = None
    self._queue_uri = None
    self._ic = ic
    self._api_factory = api_factory
samdroid-apps/something-for-reddit
[ 163, 15, 163, 18, 1459079393 ]
def goto_reddit_uri(self, uri):
    if self._w is None:
        self._queue_uri = uri
    else:
        self._w.goto_reddit_uri(uri)
samdroid-apps/something-for-reddit
[ 163, 15, 163, 18, 1459079393 ]
def __do_startup_cb(self, app):
    actions = [('about', self.__about_cb),
               ('quit', self.__quit_cb),
               ('issues', self.__issues_cb),
               ('shortcuts', self.__shortcuts_cb),
               ('settings', self.__settings_cb)]
    for name, cb in actions:
        a = Gio.SimpleAction.new(name, None)
        a.connect('activate', cb)
        self.add_action(a)

    builder = Gtk.Builder.new_from_resource(
        '/today/sam/reddit-is-gtk/app-menu.ui')
    self._menu = builder.get_object('app-menu')
    self.props.app_menu = self._menu
samdroid-apps/something-for-reddit
[ 163, 15, 163, 18, 1459079393 ]
def __issues_cb(self, action, param):
    webviews.open_uri_external(
        'https://github.com/samdroid-apps/something-for-reddit/issues')
samdroid-apps/something-for-reddit
[ 163, 15, 163, 18, 1459079393 ]
def __shortcuts_cb(self, action, param):
    builder = Gtk.Builder.new_from_resource(
        '/today/sam/reddit-is-gtk/shortcuts-window.ui')
    builder.get_object('window').show()
samdroid-apps/something-for-reddit
[ 163, 15, 163, 18, 1459079393 ]
def draw_background(setup) :
    canvas = setup['canvas']
    image = Image.new('RGBA', canvas, tuple(setup['color']['back']))
    background = Image.new('RGBA', canvas, (0,0,0,0))
    draw = ImageDraw.Draw(background)

    stars = [[ int(p * random()) for p in canvas ] for x in range(400) ]
    scale = lambda x, r : x + r * (min(canvas) / 320)
    color = (255, 255, 255, 100)

    for x, y in stars :
        r = random()
        draw.ellipse([x, y, scale(x, r), scale(y, r)], fill=color)

    return Image.alpha_composite(image, background)
vojtatom/planets
[ 2, 1, 2, 3, 1491924309 ]
def apply_ray_effect(sun_image, setup) :
    canvas = setup['canvas']
    width, height = setup['canvas'][0], setup['canvas'][1]
    decay = 0.8
    density = 1.2
    samples = 128
    center = [ x / 2 for x in setup['canvas'] ]

    list_of_pixels = list(sun_image.getdata())
    new_image = []

    print("starting postprocessing...")
    for y in range(height) :
        print("\rjob completed {0:.2f}%".format(round(100 * (y / height), 2)), flush=True, end="")
        for x in range(width) :
            tc = [x, y]
            delta = [ (x - center[0]) / (samples * density), (y - center[1]) / (samples * density) ]
            color = list_of_pixels[x + y * width]
            illumination = 1

            for m in range(samples) :
                tc = [ tc[0] - delta[0], tc[1] - delta[1]]
                add_color = tuple( illumination * x for x in list_of_pixels[int(tc[0]) + int(tc[1]) * width] )
                illumination *= decay
                color = tuple( x + y for x, y in zip(color, add_color))

            new_image.append(tuple(int(x) for x in color))

    image = Image.new(sun_image.mode, sun_image.size)
    image.putdata(new_image)
    return image
vojtatom/planets
[ 2, 1, 2, 3, 1491924309 ]
def create_sun(setup) :
    canvas, size = setup['canvas'], setup['size']
    d = min([x * 0.08 * 5 * size for x in canvas])
    planet = [ (x - d) / 2 for x in canvas ]
    planet.append(planet[0] + d)
    planet.append(planet[1] + d)

    setup['sun'] = planet
    setup['diam'] = d
    setup['rad'] = d / 2
    setup['center'] = [ planet[0] + d / 2, planet[1] + d / 2 ]
vojtatom/planets
[ 2, 1, 2, 3, 1491924309 ]
def sun(setup) :
    setup = sun_setup(setup)
    create_sun(setup)
    image = draw_background(setup)
    image = draw_sun(image, setup)

    canvas = [ int(x / 2) for x in setup['canvas'] ]
    resized = image.resize(canvas, Image.ANTIALIAS)
    resized.save("test.png")
vojtatom/planets
[ 2, 1, 2, 3, 1491924309 ]
def main(name, brief, debug, rec_debug, **unused_options):
    global stack

    if not os.path.isfile(name):
        print(name, "is an invalid file name!", file=sys.stderr)
        return 1

    arch = get_archive(name)
    stack.append((name, arch))
    if debug or brief:
        show_log(arch, rec_debug, brief)
        raise SystemExit(0)
    else:
        show(name, arch)

    while 1:
        try:
            toks = stdin_input('? ').split(None, 1)
        except EOFError:
            # Ctrl-D
            print(file=sys.stderr)  # Clear line.
            break
        if not toks:
            usage()
            continue
        if len(toks) == 1:
            cmd = toks[0]
            arg = ''
        else:
            cmd, arg = toks
        cmd = cmd.upper()
        if cmd == 'U':
            if len(stack) > 1:
                arch = stack[-1][1]
                arch.lib.close()
                del stack[-1]
            name, arch = stack[-1]
            show(name, arch)
        elif cmd == 'O':
            if not arg:
                arg = stdin_input('open name? ')
            arg = arg.strip()
            try:
                arch = get_archive(arg)
            except NotAnArchiveError as e:
                print(e, file=sys.stderr)
                continue
            if arch is None:
                print(arg, "not found", file=sys.stderr)
                continue
            stack.append((arg, arch))
            show(arg, arch)
        elif cmd == 'X':
            if not arg:
                arg = stdin_input('extract name? ')
            arg = arg.strip()
            data = get_data(arg, arch)
            if data is None:
                print("Not found", file=sys.stderr)
                continue
            filename = stdin_input('to filename? ')
            if not filename:
                print(repr(data))
            else:
                with open(filename, 'wb') as fp:
                    fp.write(data)
        elif cmd == 'Q':
            break
        else:
            usage()
    do_cleanup()
etherkit/OpenBeacon2
[ 13, 2, 13, 5, 1355386213 ]
def usage():
    print("U: go Up one level", file=sys.stderr)
    print("O <name>: open embedded archive name", file=sys.stderr)
    print("X <name>: extract name", file=sys.stderr)
    print("Q: quit", file=sys.stderr)
etherkit/OpenBeacon2
[ 13, 2, 13, 5, 1355386213 ]
def get_data(name, arch):
    if isinstance(arch.toc, dict):
        (ispkg, pos, length) = arch.toc.get(name, (0, None, 0))
        if pos is None:
            return None
        with arch.lib:
            arch.lib.seek(arch.start + pos)
            return zlib.decompress(arch.lib.read(length))
    ndx = arch.toc.find(name)
    dpos, dlen, ulen, flag, typcd, name = arch.toc[ndx]
    x, data = arch.extract(ndx)
    return data
etherkit/OpenBeacon2
[ 13, 2, 13, 5, 1355386213 ]
def get_content(arch, recursive, brief, output):
    if isinstance(arch.toc, dict):
        toc = arch.toc
        if brief:
            for name, _ in toc.items():
                output.append(name)
        else:
            output.append(toc)
    else:
        toc = arch.toc.data
        for el in toc:
            if brief:
                output.append(el[5])
            else:
                output.append(el)
            if recursive:
                if el[4] in ('z', 'a'):
                    get_content(get_archive(el[5]), recursive, brief, output)
                    stack.pop()
etherkit/OpenBeacon2
[ 13, 2, 13, 5, 1355386213 ]
def get_archive_content(filename):
    """
    Get a list of the (recursive) content of archive `filename`.

    This function is primarily meant to be used by runtests.
    """
    archive = get_archive(filename)
    stack.append((filename, archive))
    output = []
    get_content(archive, recursive=True, brief=True, output=output)
    do_cleanup()
    return output
etherkit/OpenBeacon2
[ 13, 2, 13, 5, 1355386213 ]
def checkmagic(self):
    """
    Overridable.
    Check to see if the file object self.lib actually has a file
    we understand.
    """
    self.lib.seek(self.start)  # default - magic is at start of file.

    if self.lib.read(len(self.MAGIC)) != self.MAGIC:
        raise RuntimeError("%s is not a valid %s archive file"
                           % (self.path, self.__class__.__name__))

    if self.lib.read(len(self.pymagic)) != self.pymagic:
        print("Warning: pyz is from a different Python version",
              file=sys.stderr)

    self.lib.read(4)
etherkit/OpenBeacon2
[ 13, 2, 13, 5, 1355386213 ]
def __init__(self, jss):
    """Initialize a new CommandFlush

    Args:
        jss: JSS object.
    """
    self.jss = jss
sheagcraig/python-jss
[ 104, 40, 104, 10, 1402081132 ]
def url(self):
    """Return the path subcomponent of the url to this object."""
    return self._endpoint_path
sheagcraig/python-jss
[ 104, 40, 104, 10, 1402081132 ]
def command_flush_for(self, id_type, command_id, status):
    """Flush commands for an individual device.

    Args:
        id_type (str): One of 'computers', 'computergroups',
            'mobiledevices', or 'mobiledevicegroups'.
        command_id (str, int, list): ID value(s) for the devices to
            flush. More than one device should be passed as IDs in a
            list or tuple.
        status (str): One of 'Pending', 'Failed', 'Pending+Failed'.

    Raises:
        DeleteError if provided url_path has a >= 400 response.
    """
    id_types = ('computers', 'computergroups', 'mobiledevices',
                'mobiledevicegroups')
    status_types = ('Pending', 'Failed', 'Pending+Failed')
    if id_type not in id_types or status not in status_types:
        raise ValueError("Invalid arguments.")

    if isinstance(command_id, list):
        command_id = ",".join(str(item) for item in command_id)

    flush_url = "{}/{}/id/{}/status/{}".format(
        self.url, id_type, command_id, status)

    self.jss.delete(flush_url)
sheagcraig/python-jss
[ 104, 40, 104, 10, 1402081132 ]
def __init__(self, j, resource_type, id_type, _id, resource):
    """Prepare a new FileUpload.

    Args:
        j: A JSS object to POST the upload to.
        resource_type: String. Acceptable Values:
            Attachments:
                computers
                mobiledevices
                enrollmentprofiles
                peripherals
                mobiledeviceenrollmentprofiles
            Icons:
                policies
                ebooks
                mobiledeviceapplicationsicon
            Mobile Device Application:
                mobiledeviceapplicationsipa
            Disk Encryption
                diskencryptionconfigurations
                diskencryptions (synonymous)
            PPD
                printers
        id_type: String of desired ID type:
            id
            name
        _id: Int or String referencing the identity value of the
            resource to add the FileUpload to.
        resource: String path to the file to upload.
    """
    resource_types = ["computers", "mobiledevices", "enrollmentprofiles",
                      "peripherals", "mobiledeviceenrollmentprofiles",
                      "policies", "ebooks", "mobiledeviceapplicationsicon",
                      "mobiledeviceapplicationsipa",
                      "diskencryptionconfigurations", "printers"]
    id_types = ["id", "name"]

    self.jss = j

    # Do some basic error checking on parameters.
    if resource_type in resource_types:
        self.resource_type = resource_type
    else:
        raise TypeError(
            "resource_type must be one of: %s" % ', '.join(resource_types))

    if id_type in id_types:
        self.id_type = id_type
    else:
        raise TypeError("id_type must be one of: %s" % ', '.join(id_types))

    self._id = str(_id)

    basename = os.path.basename(resource)
    content_type = mimetypes.guess_type(basename)[0]
    self.resource = {"name": (basename, open(resource, "rb"),
                              content_type)}

    self._set_upload_url()
sheagcraig/python-jss
[ 104, 40, 104, 10, 1402081132 ]
def save(self):
    """POST the object to the JSS."""
    try:
        response = self.jss.session.post(
            self._upload_url, files=self.resource)
    except PostError as error:
        if error.status_code == 409:
            raise PostError(error)
        else:
            raise MethodNotAllowedError(self.__class__.__name__)

    if response.status_code == 201:
        if self.jss.verbose:
            print("POST: Success")
            print(response.content)
    elif response.status_code >= 400:
        error_handler(PostError, response)
sheagcraig/python-jss
[ 104, 40, 104, 10, 1402081132 ]
def __init__(self, jss):
    """Initialize a new LogFlush

    Args:
        jss: JSS object.
    """
    self.jss = jss
sheagcraig/python-jss
[ 104, 40, 104, 10, 1402081132 ]
def url(self):
    """Return the path subcomponent of the url to this object."""
    return self._endpoint_path
sheagcraig/python-jss
[ 104, 40, 104, 10, 1402081132 ]
def log_flush_for_interval(self, log_type, interval):
    """Flush logs for an interval of time.

    Args:
        log_type (str): Only documented type is "policies". This will
            be applied by default if nothing is passed.
        interval (str): Combination of "Zero", "One", "Two", "Three",
            "Six", and "Day", "Week", "Month", "Year". e.g.
            ("Three+Months")
            Please note: The documentation for this specifies the
            singular form (e.g. "Month"), and plural ("Months") at
            different times, and further the construction is listed
            as "THREE MONTHS" elsewhere. Limited testing indicates
            that pluralization does not matter, nor does
            capitalization. Please test!

            No validation is performed on this prior to the request
            being made.

    Raises:
        DeleteError if provided url_path has a >= 400 response.
    """
    if not log_type:
        log_type = "policies"

    # The XML for the /logflush basic endpoint allows spaces
    # instead of "+", so do a replace here just in case.
    interval = interval.replace(" ", "+")

    flush_url = "{}/{}/interval/{}".format(
        self.url, log_type, interval)

    self.jss.delete(flush_url)
sheagcraig/python-jss
[ 104, 40, 104, 10, 1402081132 ]
def setUp(self):
    self.credentials = {
        'username': 'testuser',
        'password': 'test1234',
        'email': '[email protected]'}

    # Create a test Group
    my_group, created = Group.objects.get_or_create(name='test_group')

    # Add user to test Group
    User.objects.get(pk=1).groups.add(my_group)
OpenDroneMap/WebODM
[ 2125, 778, 2125, 82, 1470665302 ]
def test_views(self):
    c = Client()

    # Connecting to dashboard without auth redirects to /
    res = c.get('/dashboard/', follow=True)
    self.assertFalse(res.context['user'].is_authenticated)
    self.assertRedirects(res, '/login/?next=/dashboard/')

    res = c.get('/processingnode/1/', follow=True)
    self.assertRedirects(res, '/login/?next=/processingnode/1/')

    res = c.get('/map/project/1/', follow=True)
    self.assertRedirects(res, '/login/?next=/map/project/1/')

    res = c.get('/3d/project/1/task/1/', follow=True)
    self.assertRedirects(res, '/login/?next=/3d/project/1/task/1/')

    # Login
    c.post('/login/', data=self.credentials, follow=True)

    # We should have a project created from the dashboard
    self.assertTrue(Project.objects.count() >= 1)

    # Can access API page
    res = c.get('/api/')
    self.assertTrue(res.status_code == status.HTTP_200_OK)

    # We can access a processingnode view that exists
    res = c.get('/processingnode/1/')
    self.assertTrue(res.status_code == 200)
    self.assertTemplateUsed(res, 'app/processing_node.html')

    # We can access a processingnode that is offline
    # (and there's a warning message when we do that)
    res = c.get('/processingnode/2/')
    self.assertTrue(res.status_code == 200)
    self.assertTemplateUsed(res, 'app/processing_node.html')
    message = list(res.context['messages'])[0]
    self.assertEqual(message.tags, 'warning')
    self.assertTrue("offline" in message.message)

    res = c.get('/processingnode/9999/')
    self.assertTrue(res.status_code == 404)

    res = c.get('/processingnode/abc/')
    self.assertTrue(res.status_code == 404)

    # /map/ and /3d/ views
    user = User.objects.get(username="testuser")
    other_user = User.objects.get(username="testuser2")

    project = Project.objects.create(owner=user)
    task = Task.objects.create(project=project)
    other_project = Project.objects.create(owner=other_user)
    other_task = Task.objects.create(project=other_project)

    # Cannot access a project that we have no access to, or that does not exist
    for project_id in [other_project.id, 99999]:
        res = c.get('/map/project/{}/'.format(project_id))
        self.assertTrue(res.status_code == status.HTTP_404_NOT_FOUND)

    # We can access a project that we have access to
    res = c.get('/map/project/{}/'.format(project.id))
    self.assertTrue(res.status_code == status.HTTP_200_OK)

    # 3D views need project and task parameters
    res = c.get('/3d/project/{}/'.format(project.id))
    self.assertTrue(res.status_code == status.HTTP_404_NOT_FOUND)

    # Cannot access a 3d view for a task we have no access to
    res = c.get('/3d/project/{}/task/{}/'.format(other_project.id, other_task.id))
    self.assertTrue(res.status_code == status.HTTP_404_NOT_FOUND)

    # Can access 3d view for task we have access to
    res = c.get('/3d/project/{}/task/{}/'.format(project.id, task.id))
    self.assertTrue(res.status_code == status.HTTP_200_OK)

    # Cannot access public URLs unless a task is shared
    def test_public_views(client, expectedStatus):
        res = client.get('/public/task/{}/map/'.format(task.id))
        self.assertTrue(res.status_code == expectedStatus)
        res = client.get('/public/task/{}/3d/'.format(task.id))
        self.assertTrue(res.status_code == expectedStatus)
        res = client.get('/public/task/{}/iframe/3d/'.format(task.id))
        self.assertTrue(res.status_code == expectedStatus)
        res = client.get('/public/task/{}/iframe/map/'.format(task.id))
        self.assertTrue(res.status_code == expectedStatus)
        res = client.get('/public/task/{}/json/'.format(task.id))
        self.assertTrue(res.status_code == expectedStatus)

    test_public_views(c, status.HTTP_404_NOT_FOUND)

    # Share task
    task.public = True
    task.save()

    # Can now access URLs even as anonymous user
    ac = Client()
    test_public_views(ac, status.HTTP_200_OK)
OpenDroneMap/WebODM
[ 2125, 778, 2125, 82, 1470665302 ]
def test_default_group(self):
    # It exists
    self.assertTrue(Group.objects.filter(name='Default').count() == 1)

    # Verify that all new users are assigned to default group
    u = User.objects.create_user(username="default_user")
    u.refresh_from_db()
    self.assertTrue(u.groups.filter(name='Default').count() == 1)
OpenDroneMap/WebODM
[ 2125, 778, 2125, 82, 1470665302 ]
def _compute_is_paynet_contract(self):
    transmit_method = self.env.ref("ebill_paynet.paynet_transmit_method")
    for record in self:
        record.is_paynet_contract = record.transmit_method_id == transmit_method
OCA/l10n-switzerland
[ 49, 155, 49, 19, 1401971092 ]
def _check_paynet_account_number(self):
    for contract in self:
        if not contract.is_paynet_contract:
            continue
        if not contract.paynet_account_number:
            raise ValidationError(
                _("The Paynet ID is required for a Paynet contract.")
            )
OCA/l10n-switzerland
[ 49, 155, 49, 19, 1401971092 ]
def update(self):
    result = {}
    values = {'context': self.context}
    body = self.content(args=values, template=self.template)['body']
    item = self.adapt_item(body, self.viewid)
    result['coordinates'] = {self.coordinates: [item]}
    return result
ecreall/nova-ideo
[ 22, 6, 22, 9, 1417421268 ]
def before_update(self):
    self.action = self.request.resource_url(
        self.context, 'novaideoapi',
        query={'op': 'update_action_view',
               'node_id': Restor.node_definition.id})
    self.schema.widget = deform.widget.FormWidget(
        css_class='deform novaideo-ajax-form')
ecreall/nova-ideo
[ 22, 6, 22, 9, 1417421268 ]
def create_app(conf):
    app = Flask(__name__)
    app.config.update(conf)
    Bootstrap(app)
    babel = Babel(app)
    app.register_blueprint(agherant.agherant, url_prefix='/agherant')

    @babel.localeselector
    def get_locale():
        return request.accept_languages.best_match(['en', 'it', 'sq'])

    return app
insomnia-lab/libreant
[ 18, 8, 18, 62, 1416821512 ]
def __init__(self, **kwargs):
    kwargs["decimal_places"] = 2

    for f in ["min_value", "max_value"]:
        if f in kwargs:
            kwargs[f] = Decimal(kwargs[f]) / 100

    super().__init__(**kwargs)
lafranceinsoumise/api-django
[ 26, 9, 26, 14, 1492079465 ]
def clean(self, value):
    value = super().clean(value)
    return value and int(value * 100)
lafranceinsoumise/api-django
[ 26, 9, 26, 14, 1492079465 ]
def __init__(
    self,
    *,
    amount_choices=None,
    show_tax_credit=True,
    by_month=False,
    **kwargs
lafranceinsoumise/api-django
[ 26, 9, 26, 14, 1492079465 ]
def amount_choices(self): return self._amount_choices
lafranceinsoumise/api-django
[ 26, 9, 26, 14, 1492079465 ]
def amount_choices(self, amount_choices):
    self._amount_choices = amount_choices

    if self.widget:
        self.widget.attrs["data-amount-choices"] = json.dumps(self._amount_choices)
lafranceinsoumise/api-django
[ 26, 9, 26, 14, 1492079465 ]
def dump_city(self, city):
    return {
        'confidence': city.confidence,
        'geoname_id': city.geoname_id,
        'name': city.name,
        'names': city.names
    }
CERT-BDF/Cortex-Analyzers
[ 368, 340, 368, 188, 1484054590 ]
def dump_country(self, country):
    return {
        'confidence': country.confidence,
        'geoname_id': country.geoname_id,
        'iso_code': country.iso_code,
        'name': country.name,
        'names': country.names
    }
CERT-BDF/Cortex-Analyzers
[ 368, 340, 368, 188, 1484054590 ]
def dump_traits(self, traits):
    return {
        'autonomous_system_number': traits.autonomous_system_number,
        'autonomous_system_organization': traits.autonomous_system_organization,
        'domain': traits.domain,
        'ip_address': traits.ip_address,
        'is_anonymous_proxy': traits.is_anonymous_proxy,
        'is_satellite_provider': traits.is_satellite_provider,
        'isp': traits.isp,
        'organization': traits.organization,
        'user_type': traits.user_type
    }
CERT-BDF/Cortex-Analyzers
[ 368, 340, 368, 188, 1484054590 ]
def run(self):
    Analyzer.run(self)

    if self.data_type == 'ip':
        try:
            data = self.get_data()
            city = geoip2.database.Reader(os.path.dirname(__file__) + '/GeoLite2-City.mmdb').city(data)
            self.report({
                'city': self.dump_city(city.city),
                'continent': self.dump_continent(city.continent),
                'country': self.dump_country(city.country),
                'location': self.dump_location(city.location),
                'registered_country': self.dump_country(city.registered_country),
                'represented_country': self.dump_country(city.represented_country),
                'subdivisions': self.dump_country(city.subdivisions.most_specific),
                'traits': self.dump_traits(city.traits)
            })
        except ValueError as e:
            self.error('Invalid IP address')
        except AddressNotFoundError as e:
            self.error('Unknown IP address')
        except Exception as e:
            self.unexpectedError(type(e))
    else:
        self.notSupported()
CERT-BDF/Cortex-Analyzers
[ 368, 340, 368, 188, 1484054590 ]
def materialize_actual_owners_remove(events: list):
    for event in events:
        properties = {'$pull': {'owners': event['from']}}
        DeviceDomain.update_raw(event.get('components', []), properties)
        return DeviceDomain.update_raw(event['devices'], properties)
eReuse/DeviceHub
[ 2, 1, 2, 13, 1439187776 ]
def path_page(self, seo_url, **kwargs):
    """Handle SEO urls for ir.ui.views.

    ToDo: Add additional check for field seo_url_parent. Otherwise it
    is possible to use invalid url structures. For example: if you have
    two pages 'study-1' and 'study-2' with the same seo_url_level and
    different seo_url_parent you can use
    '/ecommerce/study-1/how-to-do-it-right' and
    '/ecommerce/study-2/how-to-do-it-right' to call the page
    'how-to-do-it-right'.
    """
    env = request.env(context=request.context)
    seo_url_parts = [s.encode('utf8') for s in seo_url.split('/') if s != '']
    views = env['ir.ui.view'].search([('seo_url', 'in', seo_url_parts)],
                                     order='seo_url_level ASC')
    page = 'website.404'
    if len(seo_url_parts) == len(views):
        seo_url_check = [v.seo_url.encode('utf8') for v in views]
        current_view = views[-1]
        if (seo_url_parts == seo_url_check and
                (current_view.seo_url_level + 1) == len(views)):
            page = current_view.xml_id

    if page == 'website.404':
        try:
            url = self.look_for_redirect_url(seo_url, **kwargs)
            if url:
                return request.redirect(url, code=301)
            assert url is not None
        except Exception, e:
            return request.registry['ir.http']._handle_exception(e, 404)

    if page == 'website.404' and request.website.is_publisher():
        page = 'website.page_404'

    return request.render(page, {})
blooparksystems/website
[ 10, 33, 10, 23, 1439476298 ]
def page(self, page, **opt):
    try:
        view = request.website.get_template(page)
        if view.seo_url:
            return request.redirect(view.get_seo_path()[0], code=301)
    except:
        pass
    return super(Website, self).page(page, **opt)
blooparksystems/website
[ 10, 33, 10, 23, 1439476298 ]
def test_imprint(app, client):
    app.config["SKYLINES_IMPRINT"] = u"foobar"

    res = client.get("/imprint")
    assert res.status_code == 200
    assert res.json == {u"content": u"foobar"}
skylines-project/skylines
[ 367, 102, 367, 81, 1324989203 ]
def progress_callback(percentage):
    sys.stdout.write(str(percentage) + "% ")
    sys.stdout.flush()

# process all files so the user can use wildcards like *.wav
MTG/pycompmusic
[ 27, 18, 27, 5, 1366895285 ]
def _is_file(f): return isinstance(f, file) # noqa
anlambert/tulip
[ 2, 1, 2, 1, 1486498884 ]
def _is_file(f): return isinstance(f, io.IOBase)
anlambert/tulip
[ 2, 1, 2, 1, 1486498884 ]
def _pipe_segment_with_colons(align, colwidth):
    """Return a segment of a horizontal line with optional colons which
    indicate column's alignment (as in `pipe` output format)."""
    w = colwidth
    if align in ["right", "decimal"]:
        return ('-' * (w - 1)) + ":"
    elif align == "center":
        return ":" + ('-' * (w - 2)) + ":"
    elif align == "left":
        return ":" + ('-' * (w - 1))
    else:
        return '-' * w
anlambert/tulip
[ 2, 1, 2, 1, 1486498884 ]
def _mediawiki_row_with_attrs(separator, cell_values, colwidths, colaligns):
    alignment = {"left": '',
                 "right": 'align="right"| ',
                 "center": 'align="center"| ',
                 "decimal": 'align="right"| '}
    # hard-coded padding _around_ align attribute and value together
    # rather than padding parameter which affects only the value
    values_with_attrs = [' ' + alignment.get(a, '') + c + ' '
                         for c, a in zip(cell_values, colaligns)]
    colsep = separator*2
    return (separator + colsep.join(values_with_attrs)).rstrip()
anlambert/tulip
[ 2, 1, 2, 1, 1486498884 ]
def _html_begin_table_without_header(colwidths_ignore, colaligns_ignore):
    # this table header will be suppressed if there is a header row
    return "\n".join(["<table>", "<tbody>"])
anlambert/tulip
[ 2, 1, 2, 1, 1486498884 ]
def _moin_row_with_attrs(celltag, cell_values, colwidths, colaligns, header=''):
    alignment = {"left": '',
                 "right": '<style="text-align: right;">',
                 "center": '<style="text-align: center;">',
                 "decimal": '<style="text-align: right;">'}
    values_with_attrs = ["{0}{1} {2} ".format(celltag,
                                              alignment.get(a, ''),
                                              header+c+header)
                         for c, a in zip(cell_values, colaligns)]
    return "".join(values_with_attrs)+"||"
anlambert/tulip
[ 2, 1, 2, 1, 1486498884 ]
def _latex_row(cell_values, colwidths, colaligns, escrules=LATEX_ESCAPE_RULES):
    def escape_char(c):
        return escrules.get(c, c)
    escaped_values = ["".join(map(escape_char, cell)) for cell in cell_values]
    rowfmt = DataRow("", "&", "\\\\")
    return _build_simple_row(escaped_values, rowfmt)
anlambert/tulip
[ 2, 1, 2, 1, 1486498884 ]
def escape_empty(val):
    if isinstance(val, (_text_type, _binary_type)) and not val.strip():
        return ".."
    else:
        return val
anlambert/tulip
[ 2, 1, 2, 1, 1486498884 ]