<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def check_args(args): """ Parse arguments and check if the arguments are valid """
if not os.path.exists(args.fd): print("Not a valid path", args.fd, file=ERROR_LOG) return [], [], False if args.fl is not None: # we already ensure the file can be opened and opened the file file_line = args.fl.readline() amr_ids = file_line.strip().split() elif args.f is None: print("No AMR ID was given", file=ERROR_LOG) return [], [], False else: amr_ids = args.f names = [] check_name = True if args.p is None: names = get_names(args.fd, amr_ids) # no need to check names check_name = False if len(names) == 0: print("Cannot find any user who tagged these AMR", file=ERROR_LOG) return [], [], False else: names = args.p if len(names) == 0: print("No user was given", file=ERROR_LOG) return [], [], False if len(names) == 1: print("Only one user is given. Smatch calculation requires at least two users.", file=ERROR_LOG) return [], [], False if "consensus" in names: con_index = names.index("consensus") names.pop(con_index) names.append("consensus") # check if all the AMR_id and user combinations are valid if check_name: pop_name = [] for i, name in enumerate(names): for amr in amr_ids: amr_path = args.fd + name + "/" + amr + ".txt" if not os.path.exists(amr_path): print("User", name, "fails to tag AMR", amr, file=ERROR_LOG) pop_name.append(i) break if len(pop_name) != 0: pop_num = 0 for p in pop_name: print("Deleting user", names[p - pop_num], "from the name list", file=ERROR_LOG) names.pop(p - pop_num) pop_num += 1 if len(names) < 2: print("Not enough users to evaluate. Smatch requires >2 users who tag all the AMRs", file=ERROR_LOG) return "", "", False return amr_ids, names, True
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def create_csv(filename, csv_data, mode="w"): """ Create a CSV file with the given data and store it in the file with the given name. :param filename: name of the file to store the data in :param csv_data: the data to be stored in the file :param mode: the mode in which we have to open the file. It can be 'w', 'a', etc. Default is 'w' """
with open(filename, mode) as f:
    csv_data = csv_data.replace("_", r"\_")  # str.replace returns a new string; the result must be kept
    f.write(csv_data)
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def get_metric_index(self, data_source): """ This function will return the elasticsearch index for a corresponding data source. It chooses between the default and the user-provided es indices and returns the user-provided one if it is available. :param data_source: the data source for which the index has to be returned :returns: an elasticsearch index name """
if data_source in self.index_dict:
    index = self.index_dict[data_source]
else:
    index = self.class2index[self.ds2class[data_source]]
return Index(index_name=index)
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def get_sec_overview(self): """ Generate the "overview" section of the report. """
logger.debug("Calculating Overview metrics.") data_path = os.path.join(self.data_dir, "overview") if not os.path.exists(data_path): os.makedirs(data_path) overview_config = { "activity_metrics": [], "author_metrics": [], "bmi_metrics": [], "time_to_close_metrics": [], "projects_metrics": [] } for ds in self.data_sources: metric_file = self.ds2class[ds] metric_index = self.get_metric_index(ds) overview = metric_file.overview(metric_index, self.start_date, self.end_date) for section in overview_config: overview_config[section] += overview[section] overview_config['activity_file_csv'] = "data_source_evolution.csv" overview_config['efficiency_file_csv'] = "efficiency.csv" # ACTIVITY METRICS metrics = overview_config['activity_metrics'] file_name = overview_config['activity_file_csv'] file_name = os.path.join(data_path, file_name) csv = "metricsnames, netvalues, relativevalues, datasource\n" for metric in metrics: (last, percentage) = get_trend(metric.timeseries()) csv += "{}, {}, {}, {}\n".format(metric.name, last, percentage, metric.DS_NAME) csv = csv.replace("_", "\_") create_csv(file_name, csv) # AUTHOR METRICS """ Git Authors: ----------- Description: average number of developers per month by quarters (so we have the average number of developers per month during those three months). If the approach is to work at the level of month, then just the number of developers per month. """ author = overview_config['author_metrics'] if author: authors_by_period = author[0] title_label = file_label = authors_by_period.name + ' per ' + self.interval file_path = os.path.join(data_path, file_label) csv_data = authors_by_period.timeseries(dataframe=True) # generate the CSV and the image file displaying the data self.create_csv_fig_from_df([csv_data], file_path, [authors_by_period.name], fig_type="bar", title=title_label, xlabel="time_period", ylabel=authors_by_period.id) # BMI METRICS bmi = [] bmi_metrics = overview_config['bmi_metrics'] csv = "" for metric in bmi_metrics: bmi.append(metric.aggregations()) csv += metric.id + ", " # Time to close METRICS ttc = [] ttc_metrics = overview_config['time_to_close_metrics'] for metric in ttc_metrics: ttc.append(metric.aggregations()) csv += metric.id + ", " # generate efficiency file csv = csv[:-2] + "\n" csv = csv.replace("_", "") bmi.extend(ttc) for val in bmi: csv += "%s, " % str_val(val) if csv[-2:] == ", ": csv = csv[:-2] file_name = os.path.join(data_path, 'efficiency.csv') create_csv(file_name, csv) logger.debug("Overview metrics generation complete!")
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def get_sec_project_activity(self): """ Generate the "project activity" section of the report. """
logger.debug("Calculating Project Activity metrics.") data_path = os.path.join(self.data_dir, "activity") if not os.path.exists(data_path): os.makedirs(data_path) for ds in self.data_sources: metric_file = self.ds2class[ds] metric_index = self.get_metric_index(ds) project_activity = metric_file.project_activity(metric_index, self.start_date, self.end_date) headers = [] data_frames = [] title_names = [] file_name = "" for metric in project_activity['metrics']: file_name += metric.DS_NAME + "_" + metric.id + "_" title_names.append(metric.name) headers.append(metric.id) data_frames.append(metric.timeseries(dataframe=True)) file_name = file_name[:-1] # remove trailing underscore file_path = os.path.join(data_path, file_name) title_name = " & ".join(title_names) + ' per ' + self.interval self.create_csv_fig_from_df(data_frames, file_path, headers, fig_type="bar", title=title_name)
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def get_sec_project_community(self): """ Generate the "project community" section of the report. """
logger.debug("Calculating Project Community metrics.") data_path = os.path.join(self.data_dir, "community") if not os.path.exists(data_path): os.makedirs(data_path) project_community_config = { "author_metrics": [], "people_top_metrics": [], "orgs_top_metrics": [] } for ds in self.data_sources: metric_file = self.ds2class[ds] metric_index = self.get_metric_index(ds) project_community = metric_file.project_community(metric_index, self.start_date, self.end_date) for section in project_community_config: project_community_config[section] += project_community[section] # Get git authors: author = project_community_config['author_metrics'][0] author_ts = author.timeseries(dataframe=True) csv_labels = [author.id] file_label = author.DS_NAME + "_" + author.id file_path = os.path.join(data_path, file_label) title_label = author.name + " per " + self.interval self.create_csv_fig_from_df([author_ts], file_path, csv_labels, fig_type="bar", title=title_label) """Main developers""" authors = project_community_config['people_top_metrics'][0] authors_df = authors.aggregations() authors_df = authors_df.head(self.TOP_MAX) authors_df.columns = [authors.id, "commits"] file_label = authors.DS_NAME + "_top_" + authors.id + ".csv" file_path = os.path.join(data_path, file_label) authors_df.to_csv(file_path, index=False) """Main organizations""" orgs = project_community_config['orgs_top_metrics'][0] orgs_df = orgs.aggregations() orgs_df = orgs_df.head(self.TOP_MAX) orgs_df.columns = [orgs.id, "commits"] file_label = orgs.DS_NAME + "_top_" + orgs.id + ".csv" file_path = os.path.join(data_path, file_label) orgs_df.to_csv(file_path, index=False)
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def create_csv_fig_from_df(self, data_frames=[], filename=None, headers=[], index_label=None, fig_type=None, title=None, xlabel=None, ylabel=None, xfont=10, yfont=10, titlefont=15, fig_size=(8, 10), image_type="eps"): """ Joins all the dataframes horizontally and creates a CSV and an image file from those dataframes. :param data_frames: a list of dataframes containing timeseries data from various metrics :param filename: the name of the csv and image file :param headers: a list of headers to be applied to columns of the dataframes :param index_label: name of the index column :param fig_type: figure type. Currently we support 'bar' graphs default: normal graph :param title: display title of the figure :param xlabel: label for x axis :param ylabel: label for y axis :param xfont: font size of x axis label :param yfont: font size of y axis label :param titlefont: font size of title of the figure :param fig_size: tuple describing size of the figure (in centimeters) (W x H) :param image_type: the image type to save the image as: jpg, png, etc default: png :returns: creates a csv having name as "filename".csv and an image file having the name as "filename"."image_type" """
if not data_frames:
    logger.error("No dataframes provided to create CSV")
    sys.exit(1)
assert len(data_frames) == len(headers)
dataframes = []
for index, df in enumerate(data_frames):
    df = df.rename(columns={"value": headers[index].replace("_", "")})
    dataframes.append(df)
res_df = pd.concat(dataframes, axis=1)
if "unixtime" in res_df:
    del res_df['unixtime']
if not index_label:
    index_label = "Date"
# Create the CSV file:
csv_name = filename + ".csv"
res_df.to_csv(csv_name, index_label=index_label)
logger.debug("file: {} was created.".format(csv_name))
# Create the Image:
image_name = filename + "." + image_type
title = title.replace("_", "")
figure(figsize=fig_size)
plt.subplot(111)
if fig_type == "bar":
    ax = res_df.plot.bar(figsize=fig_size)
    ticklabels = res_df.index
    ax.xaxis.set_major_formatter(matplotlib.ticker.FixedFormatter(ticklabels))
else:
    plt.plot(res_df)
if not ylabel:
    ylabel = "num " + " & ".join(headers)
if not xlabel:
    xlabel = index_label
plt.title(title, fontsize=titlefont)
plt.ylabel(ylabel, fontsize=yfont)
plt.xlabel(xlabel, fontsize=xfont)
plt.grid(True)
plt.savefig(image_name)
logger.debug("Figure {} was generated.".format(image_name))
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description:
def create_tables(database):
    '''Create all tables in the given database'''
    logging.getLogger(__name__).debug("Creating missing database tables")
    database.connect()
    database.create_tables([User, Group, UserToGroup, GroupToCapability, Capability],
                           safe=True)
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description:
def populate_with_defaults():
    '''Create the admin user and grant it all permissions.

       If the admin user already exists the function simply returns.
    '''
    logging.getLogger(__name__).debug("Populating with default users")
    if not User.select().where(User.name == 'admin').exists():
        admin = User.create(name='admin', password='admin')
        admins = Group.create(name='admins')
        starCap = Capability.create(domain='.+',
                                    action=(Action.CREATE | Action.READ |
                                            Action.UPDATE | Action.DELETE))
        admins.capabilities.add(starCap)
        admin.groups.add(admins)
        admin.save()
    if not User.select().where(User.name == 'anonymous').exists():
        anon = User.create(name='anonymous', password='')
        anons = Group.create(name='anonymous')
        readCap = Capability.create(domain=Capability.simToReg('/volumes/*'),
                                    action=Action.READ)
        anons.capabilities.add(readCap)
        anon.groups.add(anons)
        anon.save()
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description:
def init_db(dbURL, pwd_salt_size=None, pwd_rounds=None):
    '''Initialize users database

       Initialize the database and create the tables needed to handle user
       operations.

       :param dbURL: database url, as described in :func:`init_proxy`
    '''
    if not dbURL:
        dbURL = 'sqlite:///:memory:'
    logging.getLogger(__name__).debug(
        "Initializing database: {}".format(dict(url=dbURL,
                                                pwd_salt_size=pwd_salt_size,
                                                pwd_rounds=pwd_rounds)))
    try:
        db = init_proxy(dbURL)
        global pwdCryptCtx
        pwdCryptCtx = gen_crypt_context(salt_size=pwd_salt_size, rounds=pwd_rounds)
        create_tables(db)
        return db
    except Exception as e:
        e.args = (e.args[0] + ' [users database]',)
        raise
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def normalize(s, replace_spaces=True): """Normalize non-ascii characters to their closest ascii counterparts """
whitelist = (' -' + string.ascii_letters + string.digits)
if type(s) == six.binary_type:
    s = six.text_type(s, 'utf-8', 'ignore')
table = {}
for ch in [ch for ch in s if ch not in whitelist]:
    if ch not in table:
        try:
            replacement = unicodedata.normalize('NFKD', ch)[0]
            if replacement in whitelist:
                table[ord(ch)] = replacement
            else:
                table[ord(ch)] = u'_'
        except:
            table[ord(ch)] = u'_'
if replace_spaces:
    return s.translate(table).replace(u'_', u'').replace(' ', '_')
else:
    return s.translate(table).replace(u'_', u'')
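A standalone sketch of the NFKD trick this solution leans on (illustrative only, assuming Python 3 so six is not needed):

import unicodedata

def to_ascii(ch):
    # NFKD decomposes 'é' into 'e' plus a combining accent;
    # keeping the first code point drops the accent.
    return unicodedata.normalize('NFKD', ch)[0]

print(to_ascii('é'))  # -> e
print(to_ascii('ü'))  # -> u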
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def get_robot_variables(): """Return list of Robot Framework -compatible cli-variables parsed from ROBOT_-prefixed environment variable """
prefix = 'ROBOT_'
variables = []

def safe_str(s):
    if isinstance(s, six.text_type):
        return s
    else:
        return six.text_type(s, 'utf-8', 'ignore')

for key in os.environ:
    if key.startswith(prefix) and len(key) > len(prefix):
        variables.append(safe_str(
            '%s:%s' % (key[len(prefix):], os.environ[key]),
        ))
return variables
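A hedged usage sketch of the function above (the variable name is illustrative, and any other ROBOT_-prefixed variables already in the environment would also show up):

import os

os.environ['ROBOT_BROWSER'] = 'firefox'
print(get_robot_variables())  # -> ['BROWSER:firefox'] (plus any other ROBOT_ vars)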
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def convert(self): """Initiate one-shot conversion. The current settings are used, with the exception of continuous mode."""
c = self.config
c &= (~MCP342x._continuous_mode_mask & 0x7f)  # force one-shot mode
c |= MCP342x._not_ready_mask  # set /RDY to start the conversion
logger.debug('Convert ' + hex(self.address) + ' config: ' + bin(c))
self.bus.write_byte(self.address, c)
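A worked example of the masking above. The mask values here are assumptions matching the MCP342x config-register layout (bit 7 = /RDY, bit 4 = one-shot/continuous), not the class constants themselves:

config = 0b00011000                # sample config byte
continuous_mode_mask = 0b00010000  # assumed _continuous_mode_mask
not_ready_mask = 0b10000000        # assumed _not_ready_mask

c = config & (~continuous_mode_mask & 0x7f)  # clear the continuous bit, keep low 7 bits
c |= not_ready_mask                          # set /RDY to trigger a one-shot conversion
print(bin(c))  # -> 0b10001000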
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def __get_query_range(cls, date_field, start=None, end=None): """ Create a filter dict with date_field from start to end dates. :param date_field: field with the date value :param start: date with the from value. Should be a datetime.datetime object of the form: datetime.datetime(2018, 5, 25, 15, 17, 39) :param end: date with the to value. Should be a datetime.datetime object of the form: datetime.datetime(2018, 5, 25, 15, 17, 39) :return: a dict containing a range filter which can be used later in an es_dsl Search object using the `filter` method. """
if not start and not end:
    return ''
start_end = {}
if start:
    start_end["gte"] = "%s" % start.isoformat()
if end:
    start_end["lte"] = "%s" % end.isoformat()
query_range = {date_field: start_end}
return query_range
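For reference, a standalone sketch of the dict this builds (the field name is illustrative):

import datetime

start = datetime.datetime(2018, 5, 25, 15, 17, 39)
end = datetime.datetime(2018, 6, 25, 15, 17, 39)
print({"grimoire_creation_date": {"gte": start.isoformat(), "lte": end.isoformat()}})
# -> {'grimoire_creation_date': {'gte': '2018-05-25T15:17:39', 'lte': '2018-06-25T15:17:39'}}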
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def __get_query_basic(cls, date_field=None, start=None, end=None, filters={}): """ Create an es_dsl query object with the date range and filters. :param date_field: field with the date value :param start: date with the from value, should be a datetime.datetime object :param end: date with the to value, should be a datetime.datetime object :param filters: dict with the filters to be applied :return: a DSL query containing the required parameters Ex: {'query': {'bool': {'filter': [{'range': {'DATE_FIELD': {'gte': '2015-05-19T00:00:00', 'lte': '2018-05-18T00:00:00'}}}], 'must': [{'match_phrase': {'first_name': 'Jhon'}}, {'match_phrase': {'last_name': 'Doe'}}, {'match_phrase': {'Phone': 2222222}} ]}}} """
query_basic = Search()
query_filters = cls.__get_query_filters(filters)
for f in query_filters:
    query_basic = query_basic.query(f)
query_filters_inverse = cls.__get_query_filters(filters, inverse=True)
# Here, don't forget the '~'. That is what makes this an inverse filter.
for f in query_filters_inverse:
    query_basic = query_basic.query(~f)
if not date_field:
    query_range = {}
else:
    query_range = cls.__get_query_range(date_field, start, end)
    # Applying the range filter
    query_basic = query_basic.filter('range', **query_range)
return query_basic
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def __get_query_agg_terms(cls, field, agg_id=None): """ Create an es_dsl aggregation object based on a term. :param field: field to be used to aggregate :return: a tuple with the aggregation id and es_dsl aggregation object. Ex: { "terms": { "field": <field>, "size": <size>, "order": { "_count": "desc" } } } which is then passed to the Search.aggs.bucket(agg_id, query_agg) method to add aggregations to the es_dsl Search object """
if not agg_id:
    agg_id = cls.AGGREGATION_ID
query_agg = A("terms", field=field, size=cls.AGG_SIZE, order={"_count": "desc"})
return (agg_id, query_agg)
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def __get_query_agg_max(cls, field, agg_id=None): """ Create an es_dsl aggregation object for getting the max value of a field. :param field: field from which to get the max value :return: a tuple with the aggregation id and es_dsl aggregation object. Ex: { "max": { "field": <field> } } """
if not agg_id:
    agg_id = cls.AGGREGATION_ID
query_agg = A("max", field=field)
return (agg_id, query_agg)
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def __get_query_agg_avg(cls, field, agg_id=None): """ Create an es_dsl aggregation object for getting the average value of a field. :param field: field from which to get the average value :return: a tuple with the aggregation id and es_dsl aggregation object. Ex: { "avg": { "field": <field> } } """
if not agg_id:
    agg_id = cls.AGGREGATION_ID
query_agg = A("avg", field=field)
return (agg_id, query_agg)
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def __get_query_agg_cardinality(cls, field, agg_id=None): """ Create an es_dsl aggregation object for getting the approximate count of distinct values of a field. :param field: field from which to get the count of distinct values :return: a tuple with the aggregation id and es_dsl aggregation object. Ex: { "cardinality": { "field": <field>, "precision_threshold": 3000 } } """
if not agg_id:
    agg_id = cls.AGGREGATION_ID
query_agg = A("cardinality", field=field, precision_threshold=cls.ES_PRECISION)
return (agg_id, query_agg)
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def __get_bounds(cls, start=None, end=None): """ Return a dict with the bounds for a date_histogram agg. :param start: date from for the date_histogram agg, should be a datetime.datetime object :param end: date to for the date_histogram agg, should be a datetime.datetime object :return: a dict with the DSL bounds for a date_histogram aggregation """
bounds = {}
if start or end:
    # Extend bounds so we have data until start and end
    start_ts = None
    end_ts = None
    if start:
        # elasticsearch is unable to convert a date with microseconds into long
        # format for processing, hence we convert microseconds to zero
        start = start.replace(microsecond=0)
        start_ts = start.replace(tzinfo=timezone.utc).timestamp()
        start_ts_ms = start_ts * 1000  # ES uses ms
    if end:
        end = end.replace(microsecond=0)
        end_ts = end.replace(tzinfo=timezone.utc).timestamp()
        end_ts_ms = end_ts * 1000  # ES uses ms
    bounds_data = {}
    if start:
        bounds_data["min"] = start_ts_ms
    if end:
        bounds_data["max"] = end_ts_ms
    bounds["extended_bounds"] = bounds_data
return bounds
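A worked example of the epoch-millisecond conversion used above (pure stdlib):

from datetime import datetime, timezone

start = datetime(2018, 5, 25, 15, 17, 39, 123456).replace(microsecond=0)
start_ts_ms = start.replace(tzinfo=timezone.utc).timestamp() * 1000  # ES uses ms
print(int(start_ts_ms))  # -> 1527261459000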
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def __get_query_agg_ts(cls, field, time_field, interval=None, time_zone=None, start=None, end=None, agg_type='count', offset=None): """ Create an es_dsl aggregation object for getting the time series values for a field. :param field: field to get the time series values :param time_field: field with the date :param interval: interval to be used to generate the time series values, such as: year(y), quarter(q), month(M), week(w), day(d), hour(h), minute(m), second(s) :param time_zone: time zone for the time_field :param start: date from for the time series, should be a datetime.datetime object :param end: date to for the time series, should be a datetime.datetime object :param agg_type: kind of aggregation for the field (cardinality, avg, percentiles) :param offset: offset to be added to the time_field in days :return: an aggregation object to calculate timeseries values of a field """
""" Time series for an aggregation metric """ if not interval: interval = '1M' if not time_zone: time_zone = 'UTC' if not field: field_agg = '' else: if agg_type == "cardinality": agg_id, field_agg = cls.__get_query_agg_cardinality(field, agg_id=cls.AGGREGATION_ID + 1) elif agg_type == "avg": agg_id, field_agg = cls.__get_query_agg_avg(field, agg_id=cls.AGGREGATION_ID + 1) elif agg_type == "percentiles": agg_id, field_agg = cls.__get_query_agg_percentiles(field, agg_id=cls.AGGREGATION_ID + 1) else: raise RuntimeError("Aggregation of %s in ts not supported" % agg_type) bounds = {} if start or end: if not offset: # With offset and quarter interval bogus buckets are added # to the start and to the end if extended_bounds is used # https://github.com/elastic/elasticsearch/issues/23776 bounds = cls.__get_bounds(start, end) else: bounds = {'offset': offset} query_agg = A("date_histogram", field=time_field, interval=interval, time_zone=time_zone, min_doc_count=0, **bounds) agg_dict = field_agg.to_dict()[field_agg.name] query_agg.bucket(agg_id, field_agg.name, **agg_dict) return (cls.AGGREGATION_ID, query_agg)
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def get_count(cls, date_field=None, start=None, end=None, filters={}): """ Build the DSL query for counting the number of items. :param date_field: field with the date :param start: date from which to start counting, should be a datetime.datetime object :param end: date until which to count items, should be a datetime.datetime object :param filters: dict with the filters to be applied :return: a DSL query with size parameter """
""" Total number of items """ query_basic = cls.__get_query_basic(date_field=date_field, start=start, end=end, filters=filters) # size=0 gives only the count and not the hits query = query_basic.extra(size=0) return query
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def ensure_fields(self, fields, force_refetch=False): """ Makes sure we fetched the fields, and populates them if not. """
# We fetched with fields=None, we should have fetched them all
if self._fetched_fields is None or self._initialized_with_doc:
    return
if force_refetch:
    missing_fields = fields
else:
    missing_fields = [f for f in fields if f not in self._fetched_fields]
if len(missing_fields) == 0:
    return
if "_id" not in self:
    raise Exception("Can't ensure_fields because _id is missing")
self.refetch_fields(missing_fields)
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def refetch_fields(self, missing_fields): """ Refetches a list of fields from the DB """
db_fields = self.mongokat_collection.find_one({"_id": self["_id"]},
                                              fields={k: 1 for k in missing_fields})
self._fetched_fields += tuple(missing_fields)
if not db_fields:
    return
for k, v in db_fields.items():
    self[k] = v
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def unset_fields(self, fields): """ Removes this list of fields from both the local object and the DB. """
self.mongokat_collection.update_one({"_id": self["_id"]},
                                    {"$unset": {f: 1 for f in fields}})
for f in fields:
    if f in self:
        del self[f]
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def save_partial(self, data=None, allow_protected_fields=False, **kwargs): """ Saves just the currently set fields in the database. """
# Backwards compat, deprecated argument
if "dotnotation" in kwargs:
    del kwargs["dotnotation"]
if data is None:
    data = dotdict(self)
if "_id" not in data:
    raise KeyError("_id must be set in order to do a save_partial()")
del data["_id"]
if len(data) == 0:
    return
if not allow_protected_fields:
    self.mongokat_collection._check_protected_fields(data)
apply_on = dotdict(self)
self._initialized_with_doc = False
self.mongokat_collection.update_one({"_id": self["_id"]}, {"$set": data}, **kwargs)
for k, v in data.items():
    apply_on[k] = v
self.update(dict(apply_on))
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def read_default_args(tool_name): """ Read default argument values for a given tool :param tool_name: Name of the script to read the default arguments for :return: Dictionary of default arguments (shared + tool-specific) """
global opinel_arg_dir
profile_name = 'default'
# h4ck to have an early read of the profile name
for i, arg in enumerate(sys.argv):
    if arg == '--profile' and len(sys.argv) > i + 1:
        profile_name = sys.argv[i + 1]
if not os.path.isdir(opinel_arg_dir):
    try:
        os.makedirs(opinel_arg_dir)
    except:
        # Within AWS Lambda, home directories are not writable. This attempts
        # to detect that and uses the /tmp folder, which *is* writable in AWS Lambda.
        opinel_arg_dir = os.path.join(tempfile.gettempdir(), '.aws/opinel')
        if not os.path.isdir(opinel_arg_dir):
            os.makedirs(opinel_arg_dir)
opinel_arg_file = os.path.join(opinel_arg_dir, '%s.json' % profile_name)
default_args = {}
if os.path.isfile(opinel_arg_file):
    with open(opinel_arg_file, 'rt') as f:
        all_args = json.load(f)
    for target in all_args:
        if tool_name.endswith(target):
            default_args.update(all_args[target])
    for k in all_args['shared']:
        if k not in default_args:
            default_args[k] = all_args['shared'][k]
return default_args
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def prompt(test_input = None): """ Prompt function that works for Python2 and Python3 :param test_input: Value to be returned when testing :return: Value typed by user (or passed in argument when testing) """
if test_input is not None:
    if type(test_input) == list and len(test_input):
        choice = test_input.pop(0)
    elif type(test_input) == list:
        choice = ''
    else:
        choice = test_input
else:
    # Coverage: 4 missed statements
    try:
        choice = raw_input()
    except:
        choice = input()
return choice
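A hedged usage sketch of the test_input behavior: a list acts as a script of answers, consumed one per call, and an exhausted list yields the empty string.

answers = ['yes', 'no']
print(prompt(answers))  # -> 'yes'
print(prompt(answers))  # -> 'no'
print(prompt(answers))  # -> '' (the list is now empty)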
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def prompt_4_mfa_code(activate = False, input = None): """ Prompt for an MFA code :param activate: Set to true when prompting for the 2nd code when activating a new MFA device :param input: Used for unit testing :return: The MFA code """
while True:
    if activate:
        prompt_string = 'Enter the next value: '
    else:
        prompt_string = 'Enter your MFA code (or \'q\' to abort): '
    mfa_code = prompt_4_value(prompt_string, no_confirm=True, input=input)
    try:
        if mfa_code == 'q':
            return mfa_code
        int(mfa_code)
        mfa_code[5]  # raises IndexError if the code is shorter than 6 characters
        break
    except:
        printError('Error: your MFA code must only consist of digits and be at least 6 characters long.')
return mfa_code
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def prompt_4_mfa_serial(input = None): """ Prompt for an MFA serial number :param input: Used for unit testing :return: The MFA serial number """
return prompt_4_value('Enter your MFA serial:', required=False,
                      regex=re_mfa_serial_format,
                      regex_format=mfa_serial_format, input=input)
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def prompt_4_overwrite(filename, force_write, input = None): """ Prompt whether the file should be overwritten :param filename: Name of the file about to be written :param force_write: Skip confirmation prompt if this flag is set :param input: Used for unit testing :return: Boolean whether file write operation is allowed """
if not os.path.exists(filename) or force_write:
    return True
return prompt_4_yes_no('File \'{}\' already exists. Do you want to overwrite it'.format(filename),
                       input=input)
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def get_feed_renderer(engines, name): """ From engine name, load the engine path and return the renderer class. Raises 'FeedparserError' on any loading error """
if name not in engines:
    raise FeedparserError("Given feed name '{}' does not exist in "
                          "'settings.FEED_RENDER_ENGINES'".format(name))
renderer = safe_import_module(engines[name])
return renderer
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description:
def clear_line(mode=2):
    ''' Clear the current line.

        Arguments:
            mode:  | 0 | 'forward'  | 'right'  - Clear cursor to end of line.
                   | 1 | 'backward' | 'left'   - Clear cursor to beginning of line.
                   | 2 | 'full'                - Clear entire line.

        Note:
            Cursor position does not change.
    '''
    text = sc.erase_line(_mode_map.get(mode, mode))
    _write(text)
    return text
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description:
def wait_key(keys=None):
    ''' Waits for a keypress at the console and returns it.
        "Where's the any key?"

        Arguments:
            keys - if passed, wait for this specific key, e.g. ESC.
                   may be a tuple.
        Returns:
            char or ESC - depending on key hit.
            None - immediately under i/o redirection, not an interactive tty.
    '''
    if is_a_tty():
        if keys:
            if not isinstance(keys, tuple):
                keys = (keys,)
            while True:
                key = _getch()
                if key in keys:
                    return key
        else:
            return _getch()
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def build_region_list(service, chosen_regions = [], partition_name = 'aws'): """ Build the list of target region names :param service: :param chosen_regions: :param partition_name: :return: """
service = 'ec2containerservice' if service == 'ecs' else service  # Of course things aren't that easy...
# Get list of regions from botocore
regions = Session().get_available_regions(service, partition_name=partition_name)
if len(chosen_regions):
    return list((Counter(regions) & Counter(chosen_regions)).elements())
else:
    return regions
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def connect_service(service, credentials, region_name = None, config = None, silent = False): """ Instantiates an AWS API client :param service: :param credentials: :param region_name: :param config: :param silent: :return: """
api_client = None
try:
    client_params = {}
    client_params['service_name'] = service.lower()
    session_params = {}
    session_params['aws_access_key_id'] = credentials['AccessKeyId']
    session_params['aws_secret_access_key'] = credentials['SecretAccessKey']
    session_params['aws_session_token'] = credentials['SessionToken']
    if region_name:
        client_params['region_name'] = region_name
        session_params['region_name'] = region_name
    if config:
        client_params['config'] = config
    aws_session = boto3.session.Session(**session_params)
    if not silent:
        infoMessage = 'Connecting to AWS %s' % service
        if region_name:
            infoMessage = infoMessage + ' in %s' % region_name
        printInfo('%s...' % infoMessage)
    api_client = aws_session.client(**client_params)
except Exception as e:
    printException(e)
return api_client
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def handle_truncated_response(callback, params, entities): """ Handle truncated responses :param callback: :param params: :param entities: :return: """
results = {}
for entity in entities:
    results[entity] = []
while True:
    try:
        marker_found = False
        response = callback(**params)
        for entity in entities:
            if entity in response:
                results[entity] = results[entity] + response[entity]
        for marker_name in ['NextToken', 'Marker', 'PaginationToken']:
            if marker_name in response and response[marker_name]:
                params[marker_name] = response[marker_name]
                marker_found = True
        if not marker_found:
            break
    except Exception as e:
        if is_throttled(e):
            time.sleep(1)
        else:
            raise e
return results
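A hedged usage sketch against a paginated AWS API. IAM's list_users paginates with Marker, which this helper follows; the client construction is elided:

# iam_client = connect_service('iam', credentials)
users = handle_truncated_response(iam_client.list_users,
                                  {'MaxItems': 100},
                                  ['Users'])['Users']
print(len(users))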
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def pdftojpg(filehandle, meta): """Converts a PDF to a JPG and places it back onto the FileStorage instance passed to it as a BytesIO object. Optional meta arguments are: * resolution: int or (int, int) used for wand to determine resolution, defaults to 300. * width: new width of the image for resizing, defaults to 1080 * bgcolor: new background color, defaults to 'white' """
resolution = meta.get('resolution', 300)
width = meta.get('width', 1080)
bgcolor = Color(meta.get('bgcolor', 'white'))
stream = BytesIO()
with Image(blob=filehandle.stream, resolution=resolution) as img:
    img.background_color = bgcolor
    img.alpha_channel = False
    img.format = 'jpeg'
    ratio = width / img.width
    img.resize(width, int(ratio * img.height))
    img.compression_quality = 90
    img.save(file=stream)
stream.seek(0)
filehandle.stream = stream
return filehandle
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def change_filename(filehandle, meta): """Changes the filename to reflect the conversion from PDF to JPG. This method will preserve the original filename in the meta dictionary. """
filename = secure_filename(meta.get('filename', filehandle.filename))
basename, _ = os.path.splitext(filename)
meta['original_filename'] = filehandle.filename
filehandle.filename = basename + '.jpg'  # use the extension-less basename, not the full filename
return filehandle
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description:
def pdf_saver(filehandle, *args, **kwargs):
    "Uses werkzeug.FileStorage instance to save the converted image."
    fullpath = get_save_path(filehandle.filename)
    filehandle.save(fullpath, buffer_size=kwargs.get('buffer_size', 16384))
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def load_data(data_file, key_name = None, local_file = False, format = 'json'): """ Load a JSON data file :param data_file: :param key_name: :param local_file: :return: """
if local_file:
    if data_file.startswith('/'):
        src_file = data_file
    else:
        src_dir = os.getcwd()
        src_file = os.path.join(src_dir, data_file)
else:
    src_dir = os.path.join(os.path.dirname(os.path.realpath(__file__)), 'data')
    if not os.path.isdir(src_dir):
        src_dir = os.path.join(os.path.dirname(os.path.realpath(__file__)), '../data')
    src_file = os.path.join(src_dir, data_file)
with open(src_file) as f:
    if format == 'json':
        data = json.load(f)
    elif format == 'yaml':
        data = yaml.load(f)
    elif format not in ['json', 'yaml'] and not key_name:
        data = f.read()
    else:
        printError('Error, argument \'key_name\' may not be used with data in %s format.' % format)
        return None
if key_name:
    data = data[key_name]
return data
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def read_ip_ranges(filename, local_file = True, ip_only = False, conditions = []): """ Returns the list of IP prefixes from an ip-ranges file :param filename: :param local_file: :param conditions: :param ip_only: :return: """
targets = []
data = load_data(filename, local_file=local_file)
if 'source' in data:
    # Filtered IP ranges
    conditions = data['conditions']
    local_file = data['local_file'] if 'local_file' in data else False
    data = load_data(data['source'], local_file=local_file, key_name='prefixes')
else:
    # Plain IP ranges
    data = data['prefixes']
for d in data:
    condition_passed = True
    for condition in conditions:
        if type(condition) != list or len(condition) < 3:
            continue
        condition_passed = pass_condition(d[condition[0]], condition[1], condition[2])
        if not condition_passed:
            break
    if condition_passed:
        targets.append(d)
if ip_only:
    ips = []
    for t in targets:
        ips.append(t['ip_prefix'])
    return ips
else:
    return targets
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def init_app(self, app): """Configure a Flask application to use this ZODB extension."""
assert 'zodb' not in app.extensions, \
    'app already initiated for zodb'
app.extensions['zodb'] = _ZODBState(self, app)
app.teardown_request(self.close_db)
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def close_db(self, exception): """Added as a `~flask.Flask.teardown_request` to applications to commit the transaction and disconnect ZODB if it was used during the request."""
if self.is_connected:
    if exception is None and not transaction.isDoomed():
        transaction.commit()
    else:
        transaction.abort()
    self.connection.close()
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def connection(self): """Request-bound database connection."""
assert flask.has_request_context(), \
    'tried to connect zodb outside request'
if not self.is_connected:
    connector = flask.current_app.extensions['zodb']
    flask._request_ctx_stack.top.zodb_connection = connector.db.open()
    transaction.begin()
return flask._request_ctx_stack.top.zodb_connection
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def add_user_to_group(iam_client, user, group, quiet = False): """ Add an IAM user to an IAM group :param iam_client: :param user: :param group: :param quiet: :return: """
if not quiet:
    printInfo('Adding user to group %s...' % group)
iam_client.add_user_to_group(GroupName=group, UserName=user)
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def delete_virtual_mfa_device(iam_client, mfa_serial): """ Delete a virtual MFA device given its serial number :param iam_client: :param mfa_serial: :return: """
try:
    printInfo('Deleting MFA device %s...' % mfa_serial)
    iam_client.delete_virtual_mfa_device(SerialNumber=mfa_serial)
except Exception as e:
    printException(e)
    printError('Failed to delete MFA device %s' % mfa_serial)
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def init_group_category_regex(category_groups, category_regex_args): """ Initialize and compile regular expression for category groups :param category_regex_args: List of string regex :return: List of compiled regex """
category_regex = []
authorized_empty_regex = 1
if len(category_regex_args) and len(category_groups) != len(category_regex_args):
    printError('Error: you must provide as many regex as category groups.')
    return None
for regex in category_regex_args:
    if len(regex) < 1:
        if authorized_empty_regex > 0:
            category_regex.append(None)
            authorized_empty_regex -= 1
        else:
            printError('Error: you cannot have more than one empty regex to automatically assign groups to users.')
            return None
    else:
        category_regex.append(re.compile(regex))
return category_regex
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description:
def _get_extended_palette_entry(self, name, index, is_hex=False):
    ''' Compute extended entry, once on the fly. '''
    values = None
    is_fbterm = (env.TERM == 'fbterm')  # sigh
    if 'extended' in self._palette_support:  # build entry
        if is_hex:
            index = str(find_nearest_color_hexstr(index, method=self._dg_method))
        start_codes = self._start_codes_extended
        if is_fbterm:
            start_codes = self._start_codes_extended_fbterm
        values = [start_codes, index]
    # downgrade section
    elif 'basic' in self._palette_support:
        if is_hex:
            nearest_idx = find_nearest_color_hexstr(index, color_table4,
                                                    method=self._dg_method)
        else:
            from .color_tables import index_to_rgb8  # find rgb for idx
            nearest_idx = find_nearest_color_index(*index_to_rgb8[index],
                                                   color_table=color_table4,
                                                   method=self._dg_method)
        values = self._index_to_ansi_values(nearest_idx)
    return (self._create_entry(name, values, fbterm=is_fbterm) if values else empty)
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description:
def _get_true_palette_entry(self, name, digits):
    ''' Compute truecolor entry, once on the fly.

        values must become a sequence of decimal int strings: ('1', '2', '3')
    '''
    values = None
    type_digits = type(digits)
    is_fbterm = (env.TERM == 'fbterm')  # sigh
    if 'truecolor' in self._palette_support:  # build entry
        values = [self._start_codes_true]
        if type_digits is str:  # convert hex string
            if len(digits) == 3:
                values.extend(str(int(ch + ch, 16)) for ch in digits)
            else:  # chunk 'BB00BB', to ints to 'R', 'G', 'B':
                values.extend(str(int(digits[i:i+2], 16)) for i in (0, 2, 4))
        else:  # tuple of str-digit or int, may not matter to bother:
            values.extend(str(digit) for digit in digits)
    # downgrade section
    elif 'extended' in self._palette_support:
        if type_digits is str:
            nearest_idx = find_nearest_color_hexstr(digits, method=self._dg_method)
        else:  # tuple
            if type(digits[0]) is str:  # convert to ints
                digits = tuple(int(digit) for digit in digits)
            nearest_idx = find_nearest_color_index(*digits, method=self._dg_method)
        start_codes = self._start_codes_extended
        if is_fbterm:
            start_codes = self._start_codes_extended_fbterm
        values = [start_codes, str(nearest_idx)]
    elif 'basic' in self._palette_support:
        if type_digits is str:
            nearest_idx = find_nearest_color_hexstr(digits, color_table4,
                                                    method=self._dg_method)
        else:  # tuple
            if type(digits[0]) is str:  # convert to ints
                digits = tuple(int(digit) for digit in digits)
            nearest_idx = find_nearest_color_index(*digits,
                                                   color_table=color_table4,
                                                   method=self._dg_method)
        values = self._index_to_ansi_values(nearest_idx)
    return (self._create_entry(name, values, fbterm=is_fbterm) if values else empty)
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description:
def _index_to_ansi_values(self, index):
    ''' Converts a palette index to the corresponding ANSI color.

        Arguments:
            index - an int (from 0-15)
        Returns:
            index as str in a list for compatibility with values.
    '''
    if self.__class__.__name__[0] == 'F':  # Foreground
        if index < 8:
            index += ANSI_FG_LO_BASE
        else:
            index += (ANSI_FG_HI_BASE - 8)  # 82
    else:  # Background
        if index < 8:
            index += ANSI_BG_LO_BASE
        else:
            index += (ANSI_BG_HI_BASE - 8)  # 92
    return [str(index)]
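A worked example using the standard ANSI bases the constants refer to (30/90 for foreground, as the inline '# 82' comment implies; 40/100 for background):

ANSI_FG_LO_BASE, ANSI_FG_HI_BASE = 30, 90

def fg_values(index):
    return [str(index + ANSI_FG_LO_BASE if index < 8 else index + ANSI_FG_HI_BASE - 8)]

print(fg_values(3))   # -> ['33'] (yellow)
print(fg_values(11))  # -> ['93'] (bright yellow)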
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description:
def _create_entry(self, name, values, fbterm=False):
    ''' Render first values as string and place as first code,
        save, and return attr. '''
    if fbterm:
        attr = _PaletteEntryFBTerm(self, name.upper(), ';'.join(values))
    else:
        attr = _PaletteEntry(self, name.upper(), ';'.join(values))
    setattr(self, name, attr)  # now cached
    return attr
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description:
def write(self, data):
    ''' This could be a bit less clumsy. '''
    if data == '\n':  # print does this
        return self.stream.write(data)
    else:
        bytes_ = 0
        for line in data.splitlines(True):
            nl = ''
            if line.endswith('\n'):  # mv nl to end:
                line = line[:-1]
                nl = '\n'
            bytes_ += self.stream.write(
                f'{self.start}{line}{self.default}{nl}'
            ) or 0  # in case None returned (on Windows)
        return bytes_
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description:
def set_output(self, outfile):
    ''' Sets the output file; currently only useful with context managers.

        Note:
            This function is experimental and may not last.
    '''
    if self._orig_stdout:  # restore Usted
        sys.stdout = self._orig_stdout
    self._stream = outfile
    sys.stdout = _LineWriter(self, self._stream, self.default)
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description:
def _render(self):
    ''' Standard rendering of bar graph. '''
    cm_chars = self._comp_style(self.icons[_ic] * self._num_complete_chars)
    em_chars = self._empt_style(self.icons[_ie] * self._num_empty_chars)
    return f'{self._first}{cm_chars}{em_chars}{self._last} {self._lbl}'
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description:
def _render_internal_label(self):
    ''' Render with a label inside the bar graph. '''
    ncc = self._num_complete_chars
    bar = self._lbl.center(self.iwidth)
    cm_chars = self._comp_style(bar[:ncc])
    em_chars = self._empt_style(bar[ncc:])
    return f'{self._first}{cm_chars}{em_chars}{self._last}'
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description:
def _get_ncc(self, width, ratio):
    ''' Get the number of complete chars.

        This one figures the remainder for the partial char as well.
    '''
    sub_chars = round(width * ratio * self.partial_chars_len)
    ncc, self.remainder = divmod(sub_chars, self.partial_chars_len)
    return ncc
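A worked example of the divmod trick, assuming eight partial-block steps (e.g. '▏▎▍▌▋▊▉█'):

width, ratio, partial_chars_len = 30, 0.47, 8
sub_chars = round(width * ratio * partial_chars_len)   # 30 * 0.47 * 8 = 112.8 -> 113
ncc, remainder = divmod(sub_chars, partial_chars_len)
print(ncc, remainder)  # -> 14 1: fourteen full cells, then the partial char at step 1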
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description:
def _render(self):
    ''' figure partial character '''
    p_char = ''
    if not self.done and self.remainder:
        p_style = self._comp_style
        if self.partial_char_extra_style:
            if p_style is str:
                p_style = self.partial_char_extra_style
            else:
                p_style = p_style + self.partial_char_extra_style
        p_char = p_style(self.partial_chars[self.remainder])
        self._num_empty_chars -= 1
    cm_chars = self._comp_style(self.icons[_ic] * self._num_complete_chars)
    em_chars = self._empt_style(self.icons[_ie] * self._num_empty_chars)
    return f'{self._first}{cm_chars}{p_char}{em_chars}{self._last} {self._lbl}'
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def thread_work(targets, function, params = {}, num_threads = 0): """ Generic multithreading helper :param targets: :param function: :param params: :param num_threads: :return: """
q = Queue(maxsize=0)
if not num_threads:
    num_threads = len(targets)
for i in range(num_threads):
    worker = Thread(target=function, args=(q, params))
    worker.setDaemon(True)
    worker.start()
for target in targets:
    q.put(target)
q.join()
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def threaded_per_region(q, params): """ Helper for multithreading on a per-region basis :param q: :param params: :return: """
while True:
    try:
        params['region'] = q.get()
        method = params['method']
        method(params)
    except Exception as e:
        printException(e)
    finally:
        q.task_done()
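A hedged sketch of how thread_work and threaded_per_region are typically wired together (the method and region list are illustrative):

def describe_something(params):
    # note: params is shared across workers, so reading params['region'] here
    # is racy; this mirrors the original design rather than fixing it
    print('Scanning region %s...' % params['region'])

regions = ['us-east-1', 'eu-west-1']
thread_work(regions, threaded_per_region, params={'method': describe_something})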
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description:
def location(self, x=None, y=None):
    ''' Temporarily move the cursor, perform work, and return to the
        previous location.

        ::

            with screen.location(40, 20):
                print('Hello, world!')
    '''
    stream = self._stream
    stream.write(self.save_pos)  # cursor position
    if x is not None and y is not None:
        stream.write(self.mv(y, x))
    elif x is not None:
        stream.write(self.mv_x(x))
    elif y is not None:
        stream.write(self.mv_y(y))
    stream.flush()
    try:
        yield self
    finally:
        stream.write(self.rest_pos)
        stream.flush()
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description:
def fullscreen(self):
    ''' Context Manager that enters full-screen mode and restores normal
        mode on exit.

        ::

            with screen.fullscreen():
                print('Hello, world!')
    '''
    stream = self._stream
    stream.write(self.alt_screen_enable)
    stream.write(str(self.save_title(0)))  # 0 = both icon, title
    stream.flush()
    try:
        yield self
    finally:
        stream.write(self.alt_screen_disable)
        stream.write(str(self.restore_title(0)))  # 0 = icon & title
        stream.flush()
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description:
def hidden_cursor(self):
    ''' Context Manager that hides the cursor and restores it on exit.

        ::

            with screen.hidden_cursor():
                print('Clandestine activity…')
    '''
    stream = self._stream
    stream.write(self.hide_cursor)
    stream.flush()
    try:
        yield self
    finally:
        stream.write(self.show_cursor)
        stream.flush()
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def undecorated(o): """Remove all decorators from a function, method or class"""
# class decorator
if type(o) is type:
    return o
try:
    # python2
    closure = o.func_closure
except AttributeError:
    pass
try:
    # python3
    closure = o.__closure__
except AttributeError:
    return
if closure:
    for cell in closure:
        # avoid infinite recursion
        if cell.cell_contents is o:
            continue
        # check if the contents looks like a decorator; in that case
        # we need to go one level down into the dream, otherwise it
        # might just be a different closed-over variable, which we
        # can ignore.
        # Note: this favors supporting decorators defined without
        # @wraps to the detriment of function/method/class closures
        if looks_like_a_decorator(cell.cell_contents):
            undecd = undecorated(cell.cell_contents)
            if undecd:
                return undecd
    else:
        return o
else:
    return o
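A hedged usage sketch: peeling off a decorator that was applied without functools.wraps (relies on the module's looks_like_a_decorator helper):

def deco(fn):
    def wrapper(*args, **kwargs):
        return fn(*args, **kwargs)
    return wrapper

@deco
def greet():
    return 'hi'

print(greet.__name__)               # -> 'wrapper'
print(undecorated(greet).__name__)  # -> 'greet'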
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def assume_role(role_name, credentials, role_arn, role_session_name, silent = False): """ Assume role and save credentials :param role_name: :param credentials: :param role_arn: :param role_session_name: :param silent: :return: """
external_id = credentials.pop('ExternalId') if 'ExternalId' in credentials else None
# Connect to STS
sts_client = connect_service('sts', credentials, silent=silent)
# Set required arguments for assume role call
sts_args = {
    'RoleArn': role_arn,
    'RoleSessionName': role_session_name
}
# MFA used ?
if 'mfa_serial' in credentials and 'mfa_code' in credentials:
    sts_args['TokenCode'] = credentials['mfa_code']
    sts_args['SerialNumber'] = credentials['mfa_serial']
# External ID used ?
if external_id:
    sts_args['ExternalId'] = external_id
# Assume the role
sts_response = sts_client.assume_role(**sts_args)
credentials = sts_response['Credentials']
cached_credentials_filename = get_cached_credentials_filename(role_name, role_arn)
cached_credentials_path = os.path.dirname(cached_credentials_filename)
if not os.path.isdir(cached_credentials_path):
    os.makedirs(cached_credentials_path)
save_blob_as_json(cached_credentials_filename, sts_response, True, False)  # blob, force_write, debug
return credentials
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def generate_password(length=16): """ Generate a password using random characters from uppercase, lowercase, digits, and symbols :param length: Length of the password to be generated :return: The random password """
chars = string.ascii_letters + string.digits + '!@#$%^&*()_+-=[]{};:,<.>?|'
modulus = len(chars)
pchars = os.urandom(length)  # honor the requested length (was hardcoded to 16)
if type(pchars) == str:  # Python 2: urandom returns str
    return ''.join(chars[i % modulus] for i in map(ord, pchars))
else:  # Python 3: urandom returns bytes
    return ''.join(chars[i % modulus] for i in pchars)
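A quick usage sketch; with the fix above the length argument is honored. Note that chars[i % modulus] carries a slight modulo bias, which secrets.choice would avoid:

pw = generate_password(20)
print(len(pw), pw)  # -> 20 and a password drawn from letters, digits, and symbols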
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def init_sts_session(profile_name, credentials, duration = 28800, session_name = None, save_creds = True): """ Fetch STS credentials :param profile_name: :param credentials: :param duration: :param session_name: :param save_creds: :return: """
# Set STS arguments
sts_args = {
    'DurationSeconds': duration
}
# Prompt for MFA code if MFA serial present
if 'SerialNumber' in credentials and credentials['SerialNumber']:
    if not credentials['TokenCode']:
        credentials['TokenCode'] = prompt_4_mfa_code()
        if credentials['TokenCode'] == 'q':
            credentials['SerialNumber'] = None
    sts_args['TokenCode'] = credentials['TokenCode']
    sts_args['SerialNumber'] = credentials['SerialNumber']
# Init session
sts_client = boto3.session.Session(credentials['AccessKeyId'],
                                   credentials['SecretAccessKey']).client('sts')
sts_response = sts_client.get_session_token(**sts_args)
if save_creds:
    # Move long-lived credentials if needed
    if not profile_name.endswith('-nomfa') and credentials['AccessKeyId'].startswith('AKIA'):
        write_creds_to_aws_credentials_file(profile_name + '-nomfa', credentials)
    # Save STS values in the .aws/credentials file
    write_creds_to_aws_credentials_file(profile_name, sts_response['Credentials'])
return sts_response['Credentials']
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
    def read_creds_from_aws_credentials_file(profile_name, credentials_file = aws_credentials_file):
        """ Read credentials from AWS config file :param profile_name: :param credentials_file: :return: """

credentials = init_creds()
    profile_found = False
    try:
        # Make sure the ~/.aws folder exists
        if not os.path.exists(aws_config_dir):
            os.makedirs(aws_config_dir)
        with open(credentials_file, 'rt') as cf:
            for line in cf:
                profile_line = re_profile_name.match(line)
                if profile_line:
                    if profile_line.groups()[0] == profile_name:
                        profile_found = True
                    else:
                        profile_found = False
                if profile_found:
                    if re_access_key.match(line):
                        credentials['AccessKeyId'] = line.split("=")[1].strip()
                    elif re_secret_key.match(line):
                        credentials['SecretAccessKey'] = line.split("=")[1].strip()
                    elif re_mfa_serial.match(line):
                        credentials['SerialNumber'] = (line.split('=')[1]).strip()
                    elif re_session_token.match(line) or re_security_token.match(line):
                        credentials['SessionToken'] = ('='.join(x for x in line.split('=')[1:])).strip()
                    elif re_expiration.match(line):
                        credentials['Expiration'] = ('='.join(x for x in line.split('=')[1:])).strip()
    except Exception as e:
        # Silent if error is due to no ~/.aws/credentials file
        if not hasattr(e, 'errno') or e.errno != 2:
            printException(e)
    return credentials

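For reference, the shape of ~/.aws/credentials profile this parser targets; the key names mirror the re_* patterns matched above and the values are placeholders (AWS's documented example credentials):

[default]
aws_access_key_id = AKIAIOSFODNN7EXAMPLE
aws_secret_access_key = wJalrXUtnFEMI/K7MDENG/bPxRfiCYEXAMPLEKEY
aws_session_token = <session token>
aws_mfa_serial = arn:aws:iam::123456789012:mfa/some-user
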
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
    def read_creds_from_csv(filename):
        """ Read credentials from a CSV file :param filename: :return: """

key_id = None
    secret = None
    mfa_serial = None
    secret_next = False
    with open(filename, 'rt') as csvfile:
        for i, line in enumerate(csvfile):
            values = line.split(',')
            for v in values:
                if v.startswith('AKIA'):
                    key_id = v.strip()
                    secret_next = True
                elif secret_next:
                    secret = v.strip()
                    secret_next = False
                elif re_mfa_serial_format.match(v):
                    mfa_serial = v.strip()
    return key_id, secret, mfa_serial

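The loop above is keyed to the credentials CSV that the AWS IAM console exports, where the secret key is the field right after the access key; a placeholder example using AWS's documented example credentials:

User name,Access key ID,Secret access key
some-user,AKIAIOSFODNN7EXAMPLE,wJalrXUtnFEMI/K7MDENG/bPxRfiCYEXAMPLEKEY
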
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
    def read_creds_from_environment_variables():
        """ Read credentials from environment variables :return: """

creds = init_creds()
    # Check environment variables
    if 'AWS_ACCESS_KEY_ID' in os.environ and 'AWS_SECRET_ACCESS_KEY' in os.environ:
        creds['AccessKeyId'] = os.environ['AWS_ACCESS_KEY_ID']
        creds['SecretAccessKey'] = os.environ['AWS_SECRET_ACCESS_KEY']
        if 'AWS_SESSION_TOKEN' in os.environ:
            creds['SessionToken'] = os.environ['AWS_SESSION_TOKEN']
    return creds

<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
    def read_profile_from_environment_variables():
        """ Read profiles from env :return: """

role_arn = os.environ.get('AWS_ROLE_ARN', None)
    external_id = os.environ.get('AWS_EXTERNAL_ID', None)
    return role_arn, external_id

<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
    def read_profile_from_aws_config_file(profile_name, config_file = aws_config_file):
        """ Read profiles from AWS config file :param profile_name: :param config_file: :return: """

role_arn = None
    source_profile = 'default'
    mfa_serial = None
    profile_found = False
    external_id = None
    try:
        with open(config_file, 'rt') as config:
            for line in config:
                profile_line = re_profile_name.match(line)
                if profile_line:
                    role_profile_name = profile_line.groups()[0].split()[-1]
                    if role_profile_name == profile_name:
                        profile_found = True
                    else:
                        profile_found = False
                if profile_found:
                    if re_role_arn.match(line):
                        role_arn = line.split('=')[1].strip()
                    elif re_source_profile.match(line):
                        source_profile = line.split('=')[1].strip()
                    elif re_mfa_serial.match(line):
                        mfa_serial = line.split('=')[1].strip()
                    elif re_external_id.match(line):
                        external_id = line.split('=')[1].strip()
    except Exception as e:
        # Silent if error is due to no .aws/config file
        if not hasattr(e, 'errno') or e.errno != 2:
            printException(e)
    return role_arn, source_profile, mfa_serial, external_id

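And the matching ~/.aws/config role profile this function reads from, with placeholder values; note the '[profile name]' header form that the split()[-1] above accounts for:

[profile role-account]
role_arn = arn:aws:iam::123456789012:role/SomeRole
source_profile = default
mfa_serial = arn:aws:iam::123456789012:mfa/some-user
external_id = some-external-id
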
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
    def write_creds_to_aws_credentials_file(profile_name, credentials, credentials_file = aws_credentials_file):
        """ Write credentials to AWS config file :param profile_name: :param credentials: :param credentials_file: :return: """

profile_found = False
    profile_ever_found = False
    session_token_written = False
    security_token_written = False
    mfa_serial_written = False
    expiration_written = False
    # Create the .aws folder if needed
    if not os.path.isdir(aws_config_dir):
        os.mkdir(aws_config_dir)
    # Create an empty file if target does not exist
    if not os.path.isfile(credentials_file):
        open(credentials_file, 'a').close()
    # Open and parse/edit file
    for line in fileinput.input(credentials_file, inplace=True):
        profile_line = re_profile_name.match(line)
        if profile_line:
            if profile_line.groups()[0] == profile_name:
                profile_found = True
                profile_ever_found = True
            else:
                profile_found = False
            print(line.rstrip())
        elif profile_found:
            if re_access_key.match(line) and 'AccessKeyId' in credentials and credentials['AccessKeyId']:
                print('aws_access_key_id = %s' % credentials['AccessKeyId'])
            elif re_secret_key.match(line) and 'SecretAccessKey' in credentials and credentials['SecretAccessKey']:
                print('aws_secret_access_key = %s' % credentials['SecretAccessKey'])
            elif re_mfa_serial.match(line) and 'SerialNumber' in credentials and credentials['SerialNumber']:
                print('aws_mfa_serial = %s' % credentials['SerialNumber'])
                mfa_serial_written = True
            elif re_session_token.match(line) and 'SessionToken' in credentials and credentials['SessionToken']:
                print('aws_session_token = %s' % credentials['SessionToken'])
                session_token_written = True
            elif re_security_token.match(line) and 'SessionToken' in credentials and credentials['SessionToken']:
                print('aws_security_token = %s' % credentials['SessionToken'])
                security_token_written = True
            elif re_expiration.match(line) and 'Expiration' in credentials and credentials['Expiration']:
                print('expiration = %s' % credentials['Expiration'])
                expiration_written = True
            else:
                print(line.rstrip())
        else:
            print(line.rstrip())
    # Complete the profile if needed
    if profile_found:
        with open(credentials_file, 'a') as f:
            complete_profile(f, credentials, session_token_written, mfa_serial_written)
    # Add new profile if not found
    if not profile_ever_found:
        with open(credentials_file, 'a') as f:
            f.write('[%s]\n' % profile_name)
            f.write('aws_access_key_id = %s\n' % credentials['AccessKeyId'])
            f.write('aws_secret_access_key = %s\n' % credentials['SecretAccessKey'])
            complete_profile(f, credentials, session_token_written, mfa_serial_written)

<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
    def complete_profile(f, credentials, session_token_written, mfa_serial_written):
        """ Append session token and mfa serial if needed :param f: :param credentials: :param session_token_written: :param mfa_serial_written: :return: """

session_token = credentials['SessionToken'] if 'SessionToken' in credentials else None
    mfa_serial = credentials['SerialNumber'] if 'SerialNumber' in credentials else None
    if session_token and not session_token_written:
        f.write('aws_session_token = %s\n' % session_token)
    if mfa_serial and not mfa_serial_written:
        f.write('aws_mfa_serial = %s\n' % mfa_serial)

<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
    def get_stackset_ready_accounts(credentials, account_ids, quiet=True):
        """ Verify which AWS accounts have been configured for CloudFormation stack set by attempting to assume the stack set execution role :param credentials: AWS credentials to use when calling sts:assumerole :param account_ids: List of AWS accounts to check for Stackset configuration :return: List of account IDs in which assuming the stackset execution role worked """

api_client = connect_service('sts', credentials, silent=True)
    configured_account_ids = []
    for account_id in account_ids:
        try:
            role_arn = 'arn:aws:iam::%s:role/AWSCloudFormationStackSetExecutionRole' % account_id
            api_client.assume_role(RoleArn=role_arn, RoleSessionName='opinel-get_stackset_ready_accounts')
            configured_account_ids.append(account_id)
        except Exception as e:
            pass
    if len(configured_account_ids) != len(account_ids) and not quiet:
        printInfo('Only %d of these accounts have the necessary stack set execution role:' % len(configured_account_ids))
        printDebug(str(configured_account_ids))
    return configured_account_ids

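A hedged usage sketch with placeholder account IDs, reusing one of the credential readers above:

creds = read_creds_from_environment_variables()
ready = get_stackset_ready_accounts(creds, ['123456789012', '210987654321'], quiet=False)
print('stack set ready:', ready)
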
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
    def fetch(self, url):
        """ Get the feed content using 'requests' """

try:
        r = requests.get(url, timeout=self.timeout)
    except requests.exceptions.Timeout:
        if not self.safe:
            raise
        else:
            return None
    # Raise 404/500 error if any
    if r and not self.safe:
        r.raise_for_status()
    return r.text

<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
    def parse(self, content):
        """ Parse the fetched feed content. The dict returned by feedparser contains a 'bozo' key, which is set to 1 if the feed is malformed. Return None if the feed is malformed and 'bozo_accept' is False, else return the feed content dict. If the feed is malformed but 'bozo_accept' is True, the feed content dict will contain the parsing error exception information in 'bozo_exception'. """

if content is None:
        return None
    feed = feedparser.parse(content)
    # When feed is malformed
    if feed['bozo']:
        # keep track of the parsing error exception but as string
        # infos, not an exception object
        exception_content = {
            "exception": str(type(feed['bozo_exception'])),
            "content": str(feed['bozo_exception'].getException()),
            "line": feed['bozo_exception'].getLineNumber(),
            "message": feed['bozo_exception'].getMessage(),
        }
        # Overwrite the bozo content from feedparser
        feed['bozo_exception'] = exception_content
        # bozo feeds are not accepted
        if not self.bozo_accept:
            feed = None
    return feed

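A sketch of the fetch-then-parse flow; FeedFetcher is a hypothetical stand-in for whatever class carries these methods, assumed to expose timeout, safe and bozo_accept attributes:

fetcher = FeedFetcher(timeout=5, safe=True, bozo_accept=True)
content = fetcher.fetch('https://example.com/feed.xml')
feed = fetcher.parse(content)
if feed is None:
    print('feed unavailable or rejected as malformed')
elif feed['bozo']:
    print('malformed but accepted:', feed['bozo_exception']['message'])
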
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
    def _hash_url(self, url):
        """ Hash the URL to an md5sum. """

if isinstance(url, six.text_type):
        url = url.encode('utf-8')
    return hashlib.md5(url).hexdigest()

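Since md5 is deterministic, the same URL always maps to the same 32-character hex key; a standalone equivalent of what the method computes:

import hashlib
url = 'https://example.com/feed.xml'
key = hashlib.md5(url.encode('utf-8')).hexdigest()
print(len(key))  # 32, stable across calls for the same url
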
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
    def get(self, url, expiration):
        """ Fetch the feed if no cache exists or if the cache is stale """

# Hash the url to get a shorter key, and include the expiration time in the
    # key to avoid clashes when the same url is used with different expirations
    cache_key = self.cache_key.format(**{
        'id': self._hash_url(url),
        'expire': str(expiration)
    })
    # Get feed from cache if any
    feed = cache.get(cache_key)
    # Else fetch it
    if feed is None:
        feed = self.fetch(url)
        cache.set(cache_key, feed, expiration)
    return self.parse(feed)

<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
    def get_context(self, url, expiration):
        """ Build template context with formatted feed content """

self._feed = self.get(url, expiration)
    return {
        self.feed_context_name: self.format_feed_content(self._feed),
    }

<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
    def render(self, url, template=None, expiration=0):
        """ Render feed template """

template = template or self.default_template
    return render_to_string(template, self.get_context(url, expiration))

<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:

def is_ansi_capable():
    ''' Check to see whether this version of Windows is recent enough to support "ANSI VT" processing. '''
    BUILD_ANSI_AVAIL = 10586  # Win10 TH2
    CURRENT_VERS = sys.getwindowsversion()[:3]

    # Build 10586 itself supports VT processing, so compare inclusively
    if CURRENT_VERS[2] >= BUILD_ANSI_AVAIL:
        result = True
    else:
        result = False
    log.debug('version %s %s', CURRENT_VERS, result)
    return result

<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:

def get_color(name, stream=STD_OUTPUT_HANDLE):
    ''' Returns current colors of console.

        https://docs.microsoft.com/en-us/windows/console/getconsolescreenbufferinfo

        Arguments:
            name:   one of ('background', 'bg', 'foreground', 'fg')
            stream: Handle to stdout, stderr, etc.

        Returns:
            int: a color id from the conhost palette.
                 Ids under 0x8 (8) are dark colors, above light.
    '''
    stream = kernel32.GetStdHandle(stream)
    csbi = CONSOLE_SCREEN_BUFFER_INFO()
    kernel32.GetConsoleScreenBufferInfo(stream, byref(csbi))
    color_id = csbi.wAttributes & _mask_map.get(name, name)
    log.debug('color_id from conhost: %d', color_id)
    if name in ('background', 'bg'):
        # the background color sits in the high nibble; use integer division
        # so the result stays an int for the map lookup below
        color_id //= 16
        log.debug('color_id divided: %d', color_id)

    # convert to ansi order
    color_id = _win_to_ansi_offset_map.get(color_id, color_id)
    log.debug('ansi color_id: %d', color_id)
    return color_id

<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:

def get_position(stream=STD_OUTPUT_HANDLE):
    ''' Returns current position of cursor, starts at 1. '''
    stream = kernel32.GetStdHandle(stream)
    csbi = CONSOLE_SCREEN_BUFFER_INFO()
    kernel32.GetConsoleScreenBufferInfo(stream, byref(csbi))

    pos = csbi.dwCursorPosition
    # zero based, add ones for compatibility.
    return (pos.X + 1, pos.Y + 1)

<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:

def set_position(x, y, stream=STD_OUTPUT_HANDLE):
    ''' Sets current position of the cursor. '''
    stream = kernel32.GetStdHandle(stream)
    # pack the coordinates into a single dword: x in the low word, y in the high
    value = x + (y << 16)
    kernel32.SetConsoleCursorPosition(stream, c_long(value))

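A hedged round-trip sketch for the two cursor helpers above. As written, get_position returns 1-based coordinates while set_position appears to pass its arguments straight to the 0-based SetConsoleCursorPosition call, so restoring a saved position needs a -1 adjustment:

x, y = get_position()       # 1-based
print('cursor at', x, y)
set_position(x - 1, y - 1)  # back to 0-based for the Win32 call
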
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:

def get_title():
    ''' Returns console title string.

        https://docs.microsoft.com/en-us/windows/console/getconsoletitle
    '''
    MAX_LEN = 256
    buffer_ = create_unicode_buffer(MAX_LEN)
    kernel32.GetConsoleTitleW(buffer_, MAX_LEN)
    log.debug('%s', buffer_.value)
    return buffer_.value

<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
    def read_header(self):
        """Read the header of a MPQ archive."""

def read_mpq_header(offset=None):
        if offset:
            self.file.seek(offset)
        data = self.file.read(32)
        header = MPQFileHeader._make(
            struct.unpack(MPQFileHeader.struct_format, data))
        header = header._asdict()
        if header['format_version'] == 1:
            data = self.file.read(12)
            extended_header = MPQFileHeaderExt._make(
                struct.unpack(MPQFileHeaderExt.struct_format, data))
            header.update(extended_header._asdict())
        return header

    def read_mpq_user_data_header():
        data = self.file.read(16)
        header = MPQUserDataHeader._make(
            struct.unpack(MPQUserDataHeader.struct_format, data))
        header = header._asdict()
        header['content'] = self.file.read(header['user_data_header_size'])
        return header

    magic = self.file.read(4)
    self.file.seek(0)

    if magic == b'MPQ\x1a':
        header = read_mpq_header()
        header['offset'] = 0
    elif magic == b'MPQ\x1b':
        user_data_header = read_mpq_user_data_header()
        header = read_mpq_header(user_data_header['mpq_header_offset'])
        header['offset'] = user_data_header['mpq_header_offset']
        header['user_data_header'] = user_data_header
    else:
        raise ValueError("Invalid file header.")

    return header

<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
    def read_table(self, table_type):
        """Read either the hash or block table of a MPQ archive."""

if table_type == 'hash':
        entry_class = MPQHashTableEntry
    elif table_type == 'block':
        entry_class = MPQBlockTableEntry
    else:
        raise ValueError("Invalid table type.")

    table_offset = self.header['%s_table_offset' % table_type]
    table_entries = self.header['%s_table_entries' % table_type]
    key = self._hash('(%s table)' % table_type, 'TABLE')

    self.file.seek(table_offset + self.header['offset'])
    data = self.file.read(table_entries * 16)
    data = self._decrypt(data, key)

    def unpack_entry(position):
        entry_data = data[position*16:position*16+16]
        return entry_class._make(
            struct.unpack(entry_class.struct_format, entry_data))

    return [unpack_entry(i) for i in range(table_entries)]

<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
    def get_hash_table_entry(self, filename):
        """Get the hash table entry corresponding to a given filename."""

hash_a = self._hash(filename, 'HASH_A')
    hash_b = self._hash(filename, 'HASH_B')
    for entry in self.hash_table:
        if (entry.hash_a == hash_a and entry.hash_b == hash_b):
            return entry

<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
    def read_file(self, filename, force_decompress=False):
        """Read a file from the MPQ archive."""

def decompress(data):
        """Read the compression type and decompress file data."""
        compression_type = ord(data[0:1])
        if compression_type == 0:
            return data
        elif compression_type == 2:
            return zlib.decompress(data[1:], 15)
        elif compression_type == 16:
            return bz2.decompress(data[1:])
        else:
            raise RuntimeError("Unsupported compression type.")

    hash_entry = self.get_hash_table_entry(filename)
    if hash_entry is None:
        return None
    block_entry = self.block_table[hash_entry.block_table_index]

    # Read the block.
    if block_entry.flags & MPQ_FILE_EXISTS:
        if block_entry.archived_size == 0:
            return None

        offset = block_entry.offset + self.header['offset']
        self.file.seek(offset)
        file_data = self.file.read(block_entry.archived_size)

        if block_entry.flags & MPQ_FILE_ENCRYPTED:
            raise NotImplementedError("Encryption is not supported yet.")

        if not block_entry.flags & MPQ_FILE_SINGLE_UNIT:
            # File consists of many sectors. They all need to be
            # decompressed separately and united.
            sector_size = 512 << self.header['sector_size_shift']
            sectors = block_entry.size // sector_size + 1
            if block_entry.flags & MPQ_FILE_SECTOR_CRC:
                crc = True
                sectors += 1
            else:
                crc = False
            positions = struct.unpack('<%dI' % (sectors + 1),
                                      file_data[:4*(sectors+1)])
            result = BytesIO()
            sector_bytes_left = block_entry.size
            for i in range(len(positions) - (2 if crc else 1)):
                sector = file_data[positions[i]:positions[i+1]]
                if (block_entry.flags & MPQ_FILE_COMPRESS and
                        (force_decompress or sector_bytes_left > len(sector))):
                    sector = decompress(sector)

                sector_bytes_left -= len(sector)
                result.write(sector)
            file_data = result.getvalue()
        else:
            # Single unit files only need to be decompressed, but
            # compression only happens when at least one byte is gained.
            if (block_entry.flags & MPQ_FILE_COMPRESS and
                    (force_decompress or block_entry.size > block_entry.archived_size)):
                file_data = decompress(file_data)

        return file_data

<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
    def extract(self):
        """Extract all the files inside the MPQ archive in memory."""

if self.files:
        return dict((f, self.read_file(f)) for f in self.files)
    else:
        raise RuntimeError("Can't extract whole archive without listfile.")

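These methods follow the mpyq-style MPQArchive API; a typical read-only flow, assuming a constructor that takes an archive path and loads the listfile into self.files:

archive = MPQArchive('replay.SC2Replay')      # hypothetical archive path
print(archive.header['format_version'])
contents = archive.extract()                  # {filename: bytes} for listed files
details = archive.read_file('replay.details')
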
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
    def extract_to_disk(self):
        """Extract all files and write them to disk."""

archive_name, extension = os.path.splitext(os.path.basename(self.file.name))
    if not os.path.isdir(os.path.join(os.getcwd(), archive_name)):
        os.mkdir(archive_name)
    os.chdir(archive_name)
    for filename, data in self.extract().items():
        f = open(filename, 'wb')
        f.write(data or b'')
        f.close()

<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
    def extract_files(self, *filenames):
        """Extract given files from the archive to disk."""

for filename in filenames:
        data = self.read_file(filename)
        f = open(filename, 'wb')
        f.write(data or b'')
        f.close()

<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
    def _hash(self, string, hash_type):
        """Hash a string using MPQ's hash function."""

hash_types = {
        'TABLE_OFFSET': 0,
        'HASH_A': 1,
        'HASH_B': 2,
        'TABLE': 3
    }
    seed1 = 0x7FED7FED
    seed2 = 0xEEEEEEEE

    for ch in string.upper():
        if not isinstance(ch, int):
            ch = ord(ch)
        value = self.encryption_table[(hash_types[hash_type] << 8) + ch]
        seed1 = (value ^ (seed1 + seed2)) & 0xFFFFFFFF
        seed2 = ch + seed1 + seed2 + (seed2 << 5) + 3 & 0xFFFFFFFF

    return seed1

<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
    def _decrypt(self, data, key):
        """Decrypt hash or block table or a sector."""

seed1 = key
    seed2 = 0xEEEEEEEE
    result = BytesIO()

    for i in range(len(data) // 4):
        seed2 += self.encryption_table[0x400 + (seed1 & 0xFF)]
        seed2 &= 0xFFFFFFFF
        value = struct.unpack("<I", data[i*4:i*4+4])[0]
        value = (value ^ (seed1 + seed2)) & 0xFFFFFFFF

        seed1 = ((~seed1 << 0x15) + 0x11111111) | (seed1 >> 0x0B)
        seed1 &= 0xFFFFFFFF
        seed2 = value + seed2 + (seed2 << 5) + 3 & 0xFFFFFFFF

        result.write(struct.pack("<I", value))

    return result.getvalue()

<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
    def _prepare_encryption_table():
        """Prepare encryption table for MPQ hash function."""

seed = 0x00100001
    crypt_table = {}

    for i in range(256):
        index = i
        for j in range(5):
            seed = (seed * 125 + 3) % 0x2AAAAB
            temp1 = (seed & 0xFFFF) << 0x10

            seed = (seed * 125 + 3) % 0x2AAAAB
            temp2 = (seed & 0xFFFF)

            crypt_table[index] = (temp1 | temp2)

            index += 0x100

    return crypt_table

<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
    def key_for_request(self, method, url, **kwargs):
        """ Return a cache key from a given set of request parameters. Default behavior is to return a complete URL for all GET requests, and None otherwise. Can be overridden if caching of non-GET requests is desired. """

if method != 'get':
        return None
    return requests.Request(url=url, params=kwargs.get('params', {})).prepare().url

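The key is simply the fully prepared GET URL, so identical URL/params combinations collapse onto one cache entry; a quick demonstration (assuming the session can be constructed with defaults):

s = CachingSession()
key = s.key_for_request('get', 'https://example.com/api', params={'page': 2})
print(key)  # https://example.com/api?page=2
assert s.key_for_request('post', 'https://example.com/api') is None
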
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
    def request(self, method, url, **kwargs):
        """ Override, wraps Session.request in caching. Cache is only used if key_for_request returns a valid key and should_cache_response was true as well. """

# short circuit if cache isn't configured
    if not self.cache_storage:
        resp = super(CachingSession, self).request(method, url, **kwargs)
        resp.fromcache = False
        return resp

    resp = None
    method = method.lower()

    request_key = self.key_for_request(method, url, **kwargs)
    if request_key and not self.cache_write_only:
        resp = self.cache_storage.get(request_key)

    if resp:
        resp.fromcache = True
    else:
        resp = super(CachingSession, self).request(method, url, **kwargs)
        # save to cache if request and response meet criteria
        if request_key and self.should_cache_response(resp):
            self.cache_storage.set(request_key, resp)
        resp.fromcache = False

    return resp

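A minimal in-memory storage sketch showing the contract the method above relies on: the storage only needs get and set, and fromcache lets callers tell hits from fresh fetches. It assumes should_cache_response accepts the response and cache_write_only is falsy:

class MemoryCache(object):
    """Simplest possible cache storage: a dict of key -> response."""
    def __init__(self):
        self._data = {}

    def get(self, key):
        return self._data.get(key)

    def set(self, key, response):
        self._data[key] = response

s = CachingSession()
s.cache_storage = MemoryCache()
r1 = s.get('https://example.com/')  # network fetch, r1.fromcache is False
r2 = s.get('https://example.com/')  # served from cache, r2.fromcache is True
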
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
    def _make_destination_callable(dest):
        """Creates a callable out of the destination. If it's already callable, the destination is returned. Otherwise, if the object is a string or a writable object, it's wrapped in a closure to be used later. """

if callable(dest):
        return dest
    elif hasattr(dest, 'write') or isinstance(dest, string_types):
        return _use_filehandle_to_save(dest)
    else:
        raise TypeError("Destination must be a string, writable or callable object.")

<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
    def _validate(self, filehandle, metadata, catch_all_errors=False):
        """Runs all attached validators on the provided filehandle. In the base implementation of Transfer, the result of `_validate` isn't checked. Rather, validators are expected to raise UploadError to report failure. `_validate` can optionally catch all UploadErrors that occur, or bail out at the first one, by toggling the `catch_all_errors` flag. If catch_all_errors is truthy, then a single UploadError is raised consisting of all UploadErrors raised. """

errors = []
    DEFAULT_ERROR_MSG = '{0!r}({1!r}, {2!r}) returned False'
    for validator in self._validators:
        try:
            if not validator(filehandle, metadata):
                msg = DEFAULT_ERROR_MSG.format(validator, filehandle, metadata)
                raise UploadError(msg)
        except UploadError as e:
            if catch_all_errors:
                errors.append(e.args[0])
            else:
                raise

    if errors:
        raise UploadError(errors)

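A hedged sketch of a validator that plugs into the loop above: any callable taking (filehandle, metadata) works, and it may either return False or raise UploadError directly. max_size_validator is a hypothetical helper, not part of the original API:

def max_size_validator(limit=1024 * 1024):
    """Reject files larger than limit bytes."""
    def validator(filehandle, metadata):
        filehandle.seek(0, 2)   # seek to end to measure size
        size = filehandle.tell()
        filehandle.seek(0)      # rewind for downstream consumers
        if size > limit:
            raise UploadError('file exceeds %d bytes' % limit)
        return True
    return validator
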