function
stringlengths 11
56k
| repo_name
stringlengths 5
60
| features
sequence |
---|---|---|
def to_float(s: str) -> Optional[float]:
    """
    Parse *s* as a float, returning ``None`` when parsing fails.

    Normalization before parsing:
    - commas (used as thousands separators) are removed; note this is not
      internationalized well!
    - Unicode minus (U+2212) and en dash are mapped to an ASCII hyphen
    """
    if s:
        # Apply each textual normalization in turn.
        for old, new in ((',', ''), ('−', '-'), ('–', '-')):
            s = s.replace(old, new)
    try:
        return float(s)
    except (TypeError, ValueError):
        # None input (TypeError) or unparseable text (ValueError).
        return None
12,
5,
12,
5,
1425998885
] |
def gaussian(data, mean, covariance):
"""!
@brief Calculates gaussian for dataset using specified mean (mathematical expectation) and variance or covariance in case
multi-dimensional data. | annoviko/pyclustering | [
1048,
237,
1048,
68,
1393354743
] |
def __init__(self, sample, amount):
"""!
@brief Constructs EM initializer. | annoviko/pyclustering | [
1048,
237,
1048,
68,
1393354743
] |
def initialize(self, init_type = ema_init_type.KMEANS_INITIALIZATION):
"""!
@brief Calculates initial parameters for EM algorithm: means and covariances using
specified strategy. | annoviko/pyclustering | [
1048,
237,
1048,
68,
1393354743
] |
def __calculate_initial_clusters(self, centers):
"""!
@brief Calculate Euclidean distance to each point from the each cluster.
@brief Nearest points are captured by according clusters and as a result clusters are updated. | annoviko/pyclustering | [
1048,
237,
1048,
68,
1393354743
] |
def __calculate_initial_covariances(self, initial_clusters):
covariances = []
for initial_cluster in initial_clusters:
if len(initial_cluster) > 1:
cluster_sample = [self.__sample[index_point] for index_point in initial_cluster]
covariances.append(numpy.cov(cluster_sample, rowvar=False))
else:
dimension = len(self.__sample[0])
covariances.append(numpy.zeros((dimension, dimension)) + random.random() / 10.0) | annoviko/pyclustering | [
1048,
237,
1048,
68,
1393354743
] |
def __initialize_random(self):
initial_means = [] | annoviko/pyclustering | [
1048,
237,
1048,
68,
1393354743
] |
def __initialize_kmeans(self):
initial_centers = kmeans_plusplus_initializer(self.__sample, self.__amount).initialize()
kmeans_instance = kmeans(self.__sample, initial_centers, ccore = True)
kmeans_instance.process() | annoviko/pyclustering | [
1048,
237,
1048,
68,
1393354743
] |
def __init__(self):
"""!
@brief Initializes EM observer. | annoviko/pyclustering | [
1048,
237,
1048,
68,
1393354743
] |
def __len__(self):
"""!
@return (uint) Amount of iterations that were done by the EM algorithm. | annoviko/pyclustering | [
1048,
237,
1048,
68,
1393354743
] |
def get_iterations(self):
"""!
@return (uint) Amount of iterations that were done by the EM algorithm. | annoviko/pyclustering | [
1048,
237,
1048,
68,
1393354743
] |
def get_evolution_means(self):
"""!
@return (list) Mean of each cluster on each step of clustering. | annoviko/pyclustering | [
1048,
237,
1048,
68,
1393354743
] |
def get_evolution_covariances(self):
"""!
@return (list) Covariance matrix (or variance in case of one-dimensional data) of each cluster on each step of clustering. | annoviko/pyclustering | [
1048,
237,
1048,
68,
1393354743
] |
def get_evolution_clusters(self):
"""!
@return (list) Allocated clusters on each step of clustering. | annoviko/pyclustering | [
1048,
237,
1048,
68,
1393354743
] |
def notify(self, means, covariances, clusters):
"""!
@brief This method is used by the algorithm to notify observer about changes where the algorithm
should provide new values: means, covariances and allocated clusters. | annoviko/pyclustering | [
1048,
237,
1048,
68,
1393354743
] |
def show_clusters(clusters, sample, covariances, means, figure=None, display=True):
"""!
@brief Draws clusters and in case of two-dimensional dataset draws their ellipses.
@details Allocated figure by this method should be closed using `close()` method of this visualizer. | annoviko/pyclustering | [
1048,
237,
1048,
68,
1393354743
] |
def close(figure):
"""!
@brief Closes figure object that was used or allocated by the visualizer. | annoviko/pyclustering | [
1048,
237,
1048,
68,
1393354743
] |
def animate_cluster_allocation(data, observer, animation_velocity = 75, movie_fps = 1, save_movie = None):
"""!
@brief Animates clustering process that is performed by EM algorithm. | annoviko/pyclustering | [
1048,
237,
1048,
68,
1393354743
] |
def init_frame():
    """Render the initial animation frame (iteration 0)."""
    return frame_generation(0)
1048,
237,
1048,
68,
1393354743
] |
def frame_generation(index_iteration):
figure.clf() | annoviko/pyclustering | [
1048,
237,
1048,
68,
1393354743
] |
def __draw_ellipses(figure, visualizer, clusters, covariances, means):
ax = figure.get_axes()[0] | annoviko/pyclustering | [
1048,
237,
1048,
68,
1393354743
] |
def __draw_ellipse(ax, x, y, angle, width, height, color):
    """Draw a cluster center marker and its covariance ellipse on axis *ax*.

    Degenerate ellipses (non-positive width or height) are skipped entirely.
    """
    if width <= 0.0 or height <= 0.0:
        return
    # Mark the ellipse center.
    ax.plot(x, y, color=color, marker='x', markersize=6)
    ellipse = patches.Ellipse((x, y), width, height, alpha=0.2, angle=-angle, linewidth=2, fill=True, zorder=2, color=color)
    ax.add_patch(ellipse)
1048,
237,
1048,
68,
1393354743
] |
def __init__(self, data, amount_clusters, means=None, variances=None, observer=None, tolerance=0.00001, iterations=100):
"""!
@brief Initializes Expectation-Maximization algorithm for cluster analysis. | annoviko/pyclustering | [
1048,
237,
1048,
68,
1393354743
] |
def process(self):
"""!
@brief Run clustering process of the algorithm. | annoviko/pyclustering | [
1048,
237,
1048,
68,
1393354743
] |
def get_clusters(self):
"""!
@return (list) Allocated clusters where each cluster is represented by list of indexes of points from dataset,
for example, two cluster may have following representation [[0, 1, 4], [2, 3, 5, 6]]. | annoviko/pyclustering | [
1048,
237,
1048,
68,
1393354743
] |
def get_centers(self):
"""!
@return (list) Corresponding centers (means) of clusters. | annoviko/pyclustering | [
1048,
237,
1048,
68,
1393354743
] |
def get_covariances(self):
"""!
@return (list) Corresponding variances (or covariances in case of multi-dimensional data) of clusters. | annoviko/pyclustering | [
1048,
237,
1048,
68,
1393354743
] |
def get_probabilities(self):
"""!
@brief Returns 2-dimensional list with belong probability of each object from data to cluster correspondingly,
where that first index is for cluster and the second is for point. | annoviko/pyclustering | [
1048,
237,
1048,
68,
1393354743
] |
def __erase_empty_clusters(self):
clusters, means, variances, pic, gaussians, rc = [], [], [], [], [], [] | annoviko/pyclustering | [
1048,
237,
1048,
68,
1393354743
] |
def __notify(self):
    """Forward the current means, covariances and clusters to the observer, if any."""
    observer = self.__observer
    if observer is not None:
        observer.notify(self.__means, self.__variances, self.__clusters)
1048,
237,
1048,
68,
1393354743
] |
def __extract_clusters(self):
self.__clusters = [[] for _ in range(self.__amount_clusters)]
for index_point in range(len(self.__data)):
candidates = []
for index_cluster in range(self.__amount_clusters):
candidates.append((index_cluster, self.__rc[index_cluster][index_point])) | annoviko/pyclustering | [
1048,
237,
1048,
68,
1393354743
] |
def __log_likelihood(self):
likelihood = 0.0 | annoviko/pyclustering | [
1048,
237,
1048,
68,
1393354743
] |
def __probabilities(self, index_cluster, index_point):
divider = 0.0
for i in range(self.__amount_clusters):
divider += self.__pic[i] * self.__gaussians[i][index_point] | annoviko/pyclustering | [
1048,
237,
1048,
68,
1393354743
] |
def __expectation_step(self):
self.__gaussians = [ [] for _ in range(self.__amount_clusters) ]
for index in range(self.__amount_clusters):
self.__gaussians[index] = gaussian(self.__data, self.__means[index], self.__variances[index]) | annoviko/pyclustering | [
1048,
237,
1048,
68,
1393354743
] |
def __maximization_step(self):
self.__pic = []
self.__means = []
self.__variances = [] | annoviko/pyclustering | [
1048,
237,
1048,
68,
1393354743
] |
def __get_stop_condition(self):
for covariance in self.__variances:
if numpy.linalg.norm(covariance) == 0.0:
return True | annoviko/pyclustering | [
1048,
237,
1048,
68,
1393354743
] |
def __update_covariance(self, means, rc, mc):
covariance = 0.0
for index_point in range(len(self.__data)):
deviation = numpy.array([self.__data[index_point] - means])
covariance += rc[index_point] * deviation.T.dot(deviation) | annoviko/pyclustering | [
1048,
237,
1048,
68,
1393354743
] |
def __update_mean(self, rc, mc):
mean = 0.0
for index_point in range(len(self.__data)):
mean += rc[index_point] * self.__data[index_point] | annoviko/pyclustering | [
1048,
237,
1048,
68,
1393354743
] |
def __normalize_probabilities(self):
for index_point in range(len(self.__data)):
probability = 0.0
for index_cluster in range(len(self.__clusters)):
probability += self.__rc[index_cluster][index_point] | annoviko/pyclustering | [
1048,
237,
1048,
68,
1393354743
] |
def __normalize_probability(self, index_point, probability):
if probability == 0.0:
return | annoviko/pyclustering | [
1048,
237,
1048,
68,
1393354743
] |
def __verify_arguments(self):
"""!
@brief Verify input parameters for the algorithm and throw exception in case of incorrectness. | annoviko/pyclustering | [
1048,
237,
1048,
68,
1393354743
] |
def test_slack():
    """readDistro() must identify Slackware from the fixture file."""
    output = py.Output()
    result = output.readDistro('./test/slack.test')
    assert result == ('Slackware', 'Slackware 14.1')
7,
7,
7,
5,
1427665853
] |
def test_arch():
    """readDistro() must identify Arch Linux from the fixture file."""
    output = py.Output()
    result = output.readDistro('./test/arch.test')
    assert result == ('Arch Linux', 'Arch Linux')
7,
7,
7,
5,
1427665853
] |
def available_hardware(__cls__):
    """
    Discover all sensor driver classes, keyed by their HARDWARE identifier.

    Scans sibling ``*_sensor.py`` modules, imports them, and collects every
    subclass of this base class. The result is cached indefinitely, so the
    filesystem scan runs only once per process.
    """
    __CACHE_KEY = 'known_sensors'
    cache = terrariumCache()
    known_sensors = cache.get_data(__CACHE_KEY)
    if known_sensors is None:
        known_sensors = {}
        all_types = []
        # Start dynamically loading sensors (based on: https://www.bnmetrics.com/blog/dynamic-import-in-python3)
        for file in sorted(Path(__file__).parent.glob('*_sensor.py')):
            imported_module = import_module( '.' + file.stem, package='{}'.format(__name__))
            for i in dir(imported_module):
                attribute = getattr(imported_module, i)
                # Keep only strict subclasses of this base class.
                if inspect.isclass(attribute) and attribute != __cls__ and issubclass(attribute, __cls__):
                    # Expose the driver class on this module for later lookup.
                    setattr(sys.modules[__name__], file.stem, attribute)
                    if attribute.HARDWARE is not None:
                        known_sensors[attribute.HARDWARE] = attribute
                        all_types += attribute.TYPES
        # Update sensors that do not have a known type. Those are remote and scripts sensors
        all_types = list(set(all_types))
        for hardware in known_sensors:
            if len(known_sensors[hardware].TYPES) == 0:
                known_sensors[hardware].TYPES = all_types
        # -1 = cache without expiry.
        cache.set_data(__CACHE_KEY,known_sensors,-1)
    return known_sensors
354,
91,
354,
15,
1452802484
] |
def available_sensors(__cls__):
    """Return metadata dicts for every loadable sensor, sorted by name.

    Sensors without declared TYPES (remote/script sensors) are given the
    union of all known types.
    """
    # For now 'conductivity' is only available through script or remote
    known_types = ['conductivity']
    entries = []
    for hardware_type, sensor in __cls__.available_hardware.items():
        if sensor.NAME is not None:
            entries.append({'hardware' : hardware_type, 'name' : sensor.NAME, 'types' : sensor.TYPES})
        known_types += sensor.TYPES
    # Remote and script sensors can handle all the known types
    known_types = list(set(known_types))
    for entry in entries:
        if not entry['types']:
            entry['types'] = known_types
    return sorted(entries, key=lambda entry: entry['name'])
354,
91,
354,
15,
1452802484
] |
def sensor_types(__cls__):
    """Return the sorted, de-duplicated list of all supported sensor types."""
    unique_types = set()
    for sensor in __cls__.available_sensors:
        unique_types.update(sensor['types'])
    return sorted(unique_types)
354,
91,
354,
15,
1452802484
] |
def __new__(cls, sensor_id, hardware_type, sensor_type, address, name = '', unit_value_callback = None, trigger_callback = None):
    """
    Dispatch construction to the concrete driver class for *hardware_type*.

    Raises terrariumSensorUnknownHardwareException for unknown hardware and
    terrariumSensorInvalidSensorTypeException when the hardware does not
    support the requested *sensor_type*.
    """
    known_sensors = terrariumSensor.available_hardware
    if hardware_type not in known_sensors:
        raise terrariumSensorUnknownHardwareException(f'Trying to load an unknown hardware device {hardware_type} at address {address} with name {name}')
    if sensor_type not in known_sensors[hardware_type].TYPES:
        raise terrariumSensorInvalidSensorTypeException(f'Hardware does not have a {sensor_type} sensor at address {address} with name {name}')
    # Instantiate the concrete driver subclass instead of the base class.
    return super(terrariumSensor, cls).__new__(known_sensors[hardware_type])
354,
91,
354,
15,
1452802484
] |
def __power_management(self, on):
    """
    Toggle power to the sensor via its power-management GPIO pin.

    No-op when no ``power_mngt`` pin is configured. When powering on, waits
    one second so the sensor can settle before being read.
    """
    # Some kind of 'power management' with the last gpio pin number :) https://raspberrypi.stackexchange.com/questions/68123/preventing-corrosion-on-yl-69
    if self._device['power_mngt'] is not None:
        logger.debug(f'Sensor {self} has power management enabled')
        if on:
            # BUG FIX: both messages below were missing the f-prefix, so the
            # literal text '{self}' was logged instead of the sensor name.
            logger.debug(f'Enable power to the sensor {self} now.')
            GPIO.output(self._device['power_mngt'], GPIO.HIGH)
            # Give the sensor time to power up before it is read.
            sleep(1)
        else:
            logger.debug(f'Close power to the sensor {self} now.')
            GPIO.output(self._device['power_mngt'], GPIO.LOW)
354,
91,
354,
15,
1452802484
] |
def __sensor_cache_key(self):
    """Lazily compute and cache the md5 cache key (hardware model + address)."""
    if self._device['cache_key'] is None:
        fingerprint = f'{self.HARDWARE}{self.address}'.encode()
        self._device['cache_key'] = md5(fingerprint).hexdigest()
    return self._device['cache_key']
354,
91,
354,
15,
1452802484
] |
def id(self):
    """Lazily compute and cache the sensor id (md5 of hardware+address+type)."""
    if self._device['id'] is None:
        fingerprint = f'{self.HARDWARE}{self.address}{self.type}'
        self._device['id'] = md5(fingerprint.encode()).hexdigest()
    return self._device['id']
354,
91,
354,
15,
1452802484
] |
def id(self, value):
    """Set the sensor id, stripping surrounding whitespace; None is ignored."""
    if value is None:
        return
    self._device['id'] = value.strip()
354,
91,
354,
15,
1452802484
] |
def hardware(self):
    """Return the hardware model identifier (class constant HARDWARE)."""
    model = self.HARDWARE
    return model
354,
91,
354,
15,
1452802484
] |
def name(self):
    """Return the configured human-readable sensor name."""
    sensor_name = self._device['name']
    return sensor_name
354,
91,
354,
15,
1452802484
] |
def name(self, value):
    """Update the sensor name; blank or whitespace-only values are ignored."""
    cleaned = value.strip()
    if cleaned:
        self._device['name'] = cleaned
354,
91,
354,
15,
1452802484
] |
def address(self):
    """Return the raw configured address string."""
    raw_address = self._device['address']
    return raw_address
354,
91,
354,
15,
1452802484
] |
def _address(self):
    """Split the raw address on commas into non-empty, trimmed parts."""
    parts = self.address.split(',')
    return [part.strip() for part in parts if part.strip() != '']
354,
91,
354,
15,
1452802484
] |
def address(self, value):
    """Store a cleaned address; None or empty cleaning results are ignored."""
    cleaned = terrariumUtils.clean_address(value)
    if cleaned is not None and cleaned != '':
        self._device['address'] = cleaned
354,
91,
354,
15,
1452802484
] |
def device(self):
    """Return the loaded hardware device instance backing this sensor."""
    loaded_device = self._device['device']
    return loaded_device
354,
91,
354,
15,
1452802484
] |
def sensor_type(self):
    """Return the measurement type of this sensor (alias of ``type``)."""
    measurement_type = self._device['type']
    return measurement_type
354,
91,
354,
15,
1452802484
] |
def type(self):
    """Return the measurement type of this sensor."""
    measurement_type = self._device['type']
    return measurement_type
354,
91,
354,
15,
1452802484
] |
def value(self):
    """Return the last measured value."""
    last_value = self._device['value']
    return last_value
354,
91,
354,
15,
1452802484
] |
def last_update(self):
    """Return the timestamp of the last successful update."""
    timestamp = self._device['last_update']
    return timestamp
354,
91,
354,
15,
1452802484
] |
def erratic(self):
    """Return the current count of erratic (out-of-range) measurements."""
    error_count = self._device['erratic_errors']
    return error_count
354,
91,
354,
15,
1452802484
] |
def erratic(self, value):
    """Set the count of erratic measurements."""
    self._device.update(erratic_errors=value)
354,
91,
354,
15,
1452802484
] |
def load_hardware(self, reload = False):
    """
    Load (or reload) the underlying hardware device, caching it per
    hardware-model + address combination.

    Raises terrariumSensorLoadingException when loading fails or times out.
    """
    # Get hardware cache key based on the combination of hardware and address
    hardware_cache_key = md5(f'HW-{self.HARDWARE}-{self.address}'.encode()).hexdigest()
    # Load hardware device from cache
    hardware = self._sensor_cache.get_data(hardware_cache_key)
    if reload or hardware is None:
        # Could not find valid hardware cache. So create a new hardware device
        try:
            # Guard driver initialisation with a timeout so a hung bus
            # cannot block forever.
            hardware = func_timeout(self._UPDATE_TIME_OUT, self._load_hardware)
            if hardware is not None:
                # Store the hardware in the cache for unlimited of time
                self._sensor_cache.set_data(hardware_cache_key,hardware,-1)
            else:
                # Raise error that hard is not loaded with an unknown message :(
                raise terrariumSensorLoadingException(f'Unable to load sensor {self}: Did not return a device.')
        except FunctionTimedOut:
            # What ever fails... does not matter, as the data is still None and will raise a terrariumSensorUpdateException and trigger the retry
            raise terrariumSensorLoadingException(f'Unable to load sensor {self}: timed out ({self._UPDATE_TIME_OUT} seconds) during loading.')
        except Exception as ex:
            raise terrariumSensorLoadingException(f'Unable to load sensor {self}: {ex}')
    self._device['device'] = hardware
    # Check for power management features and enable it if set
    if self._device['power_mngt'] is not None:
        GPIO.setup(self._device['power_mngt'], GPIO.OUT)
354,
91,
354,
15,
1452802484
] |
def get_data(self):
    """
    Take a single reading from the sensor, with power management and a
    hard timeout around the hardware access.

    Returns the raw data from the driver-specific _get_data().
    Raises terrariumSensorUpdateException when no data could be read.
    """
    data = None
    # Power the sensor up first (no-op unless power management is set).
    self.__power_management(True)
    try:
        data = func_timeout(self._UPDATE_TIME_OUT, self._get_data)
    except FunctionTimedOut:
        # What ever fails... does not matter, as the data is still None and will raise a terrariumSensorUpdateException and trigger the retry
        logger.error(f'Sensor {self} timed out after {self._UPDATE_TIME_OUT} seconds during updating...')
    except Exception as ex:
        logger.error(f'Sensor {self} has exception: {ex}')
    # Always power back down, even after a failed read.
    self.__power_management(False)
    if data is None:
        raise terrariumSensorUpdateException(f'Invalid reading from sensor {self}')
    return data
354,
91,
354,
15,
1452802484
] |
def stop(self):
    """Release the power-management GPIO pin, if one was configured."""
    power_pin = self._device['power_mngt']
    if power_pin is not None:
        GPIO.cleanup(power_pin)
354,
91,
354,
15,
1452802484
] |
def scan_sensors(unit_value_callback = None, trigger_callback = None, **kwargs):
    """Yield every sensor found by drivers that support hardware scanning."""
    for hardware_type, sensor_device in terrariumSensor.available_hardware.items():
        try:
            yield from sensor_device._scan_sensors(unit_value_callback, trigger_callback, **kwargs)
        except AttributeError:
            # Driver has no _scan_sensors(): scanning not supported, just ignore
            pass
354,
91,
354,
15,
1452802484
] |
def _load_hardware(self):
    """Create the MCP3008 analog converter for this sensor's channel/device."""
    address = self._address
    # Second address part selects the SPI device; default 0 when absent/negative.
    spi_device = 0 if len(address) == 1 or int(address[1]) < 0 else int(address[1])
    # Load the analog converter here
    return MCP3008(channel=int(address[0]), device=spi_device)
354,
91,
354,
15,
1452802484
] |
def _address(self):
    """Return parent address parts with the first part parsed as a hex int."""
    address = super()._address
    first = address[0]
    if type(first) is str:
        # Accept addresses with or without the '0x' prefix.
        if not first.startswith('0x'):
            first = '0x' + first
        address[0] = int(first, 16)
    return address
354,
91,
354,
15,
1452802484
] |
def _load_hardware(self):
    """Open the I2C bus and return (device_address, bus) for this sensor."""
    address = self._address
    # Second address part selects the bus number; default to bus 1.
    bus_number = 1 if len(address) == 1 or int(address[1]) < 1 else int(address[1])
    return (address[0], smbus2.SMBus(bus_number))
354,
91,
354,
15,
1452802484
] |
def __soft_reset(self, i2c_bus):
    """Send the soft-reset command and wait for the device to settle."""
    device_address = self.device[0]
    i2c_bus.write_byte(device_address, self.SOFTRESET)
    sleep(self.SOFTRESET_TIMEOUT)
354,
91,
354,
15,
1452802484
] |
def _get_data(self):
    """
    Read temperature and/or humidity over I2C and convert raw words to units.

    NOTE(review): the scaling constants (175.72/65536-46.85, 125/65536-6)
    look like the SHT21/HTU21 datasheet formulas — confirm against the
    actual sensor datasheet before relying on this.
    """
    data = {}
    with self._open_hardware() as i2c_bus:
        # Datasheet recommend do Soft Reset before measurement:
        self.__soft_reset(i2c_bus)
        if 'temperature' in self.TYPES:
            # Raw value is a 16-bit big-endian word in the first two bytes.
            bytedata = self.__get_data(i2c_bus, self.TEMPERATURE_TRIGGER_NO_HOLD,self.TEMPERATURE_WAIT_TIME)
            data['temperature'] = ((bytedata[0]*256.0+bytedata[1])*175.72/65536.0)-46.85
        if 'humidity' in self.TYPES:
            bytedata = self.__get_data(i2c_bus, self.HUMIDITY_TRIGGER_NO_HOLD,self.HUMIDITY_WAIT_TIME)
            data['humidity'] = ((bytedata[0]*256.0+bytedata[1])*125.0/65536.0)-6.0
    return data
354,
91,
354,
15,
1452802484
] |
def __init__(self, address, bus = 1):
    """Init smbus channel and tca driver on specified address.

    On any failure self.i2c_bus is left as None so callers can detect an
    unusable device.
    """
    try:
        self.PORTS_COUNT = 8 # number of switches
        self.i2c_bus = smbus2.SMBus(bus)
        self.i2c_address = address
        # Probe the device; no readable control register means nothing is there.
        if self.get_control_register() is None:
            raise ValueError
    except ValueError:
        logger.error("No device found on specified address!")
        self.i2c_bus = None
    except Exception:
        # BUG FIX: was a bare `except:`, which would also swallow
        # SystemExit/KeyboardInterrupt; narrowed to Exception.
        logger.error("Bus on channel {} is not available.".format(bus))
        logger.info("Available busses are listed as /dev/i2c*")
        self.i2c_bus = None
354,
91,
354,
15,
1452802484
] |
def get_channel(self, ch_num):
    """Return channel *ch_num*'s state: 1=enabled, 0=disabled, None on error."""
    if not 0 <= ch_num <= self.PORTS_COUNT - 1:
        return None
    register = self.get_control_register()
    if register is None:
        return None
    # Each channel is one bit of the control register.
    return (register >> ch_num) & 1
354,
91,
354,
15,
1452802484
] |
def set_channel(self, ch_num, state):
    """Enable (state=1) or disable (state=0) channel *ch_num*.

    Returns the driver's write result, or False for invalid input or when
    the control register could not be read.
    """
    if not 0 <= ch_num <= self.PORTS_COUNT - 1:
        return False
    if state != 0 and state != 1:
        return False
    current_value = self.get_control_register()
    if current_value is None:
        return False
    mask = 1 << ch_num
    # Set or clear the single channel bit, leaving the others untouched.
    new_value = (current_value | mask) if state else (current_value & (255 - mask))
    return self.set_control_register(new_value)
354,
91,
354,
15,
1452802484
] |
def _address(self):
    """Return address parts, guaranteeing a numeric second element (default 0)."""
    address = super()._address
    if len(address) == 1:
        # No second part supplied: default it to 0.
        address.append(0)
    elif len(address) == 2:
        # NOTE(review): `terrariumUtils.is_float(address[1]) > 0` compares a
        # boolean against 0, so it reduces to is_float() alone; the intent was
        # probably `float(address[1]) > 0`. Left unchanged — confirm.
        address[1] = int(address[1]) if terrariumUtils.is_float(address[1]) and terrariumUtils.is_float(address[1]) > 0 else 0
    return address
354,
91,
354,
15,
1452802484
] |
def __init__(self,host,index,map_name,mapping=None,id_key=None):
    """Create an Elasticsearch-backed indexer for the given index and mapping."""
    self.index, self.map_name = index, map_name
    self.mapping, self.id_key = mapping, id_key
    # Open the connection last, once the configuration is stored.
    self.es = pyes.ES(host)
8,
8,
8,
6,
1323330815
] |
def index_item(self,item):
    """Index a single item and refresh so it is immediately searchable."""
    es = self.es
    es.index(item, self.index, self.map_name)
    es.refresh(self.index)
8,
8,
8,
6,
1323330815
] |
def initial_index():
host = '127.0.0.1:9200'
index = 'bill-index'
map_name = 'bill-type'
mapping = {
'document':{
'type':'attachment',
'fields':{
"title" : { "store" : "yes" },
"file" : {
"term_vector":"with_positions_offsets",
"store":"yes"
}
}
},
'name':{
'type':'string',
'store':'yes',
'boost':1.0,
'index':'analyzed'
},
'long_name':{
'type':'string',
'store':'yes',
'boost':1.0,
'index':'analyzed'
},
'status':{
'type':'string',
'store':'yes',
},
'year':{
'type':'integer',
'store':'yes'
},
'read_by':{
'type':'string',
'store':'yes',
'index':'analyzed'
},
'date_presented':{
'type':'date',
'store':'yes'
},
'bill_id':{
'type':'integer',
'store':'yes'
},
'id':{
'type':'integer',
'store':'yes'
}
} | sweemeng/Malaysian-Bill-Watcher | [
8,
8,
8,
6,
1323330815
] |
def index_single(rev_id):
    """Index one bill revision (looked up by id) into the local ES index."""
    host = '127.0.0.1:9200'
    index = 'bill-index'
    map_name = 'bill-type'
    initdb()
    session = DBSession()
    revision = session.query(BillRevision).get(rev_id)
    document = convert_to_document(revision)
    Search(host, index, map_name).index_item(document)
8,
8,
8,
6,
1323330815
] |
def get_context_data(self, **kwargs):
    """Inject every non-routine ``menu_*`` attribute into the template context."""
    context = super(MenuItemMixin, self).get_context_data(**kwargs)
    attributes = inspect.getmembers(self, lambda member: not inspect.isroutine(member))
    context.update({key: value for key, value in attributes if key.startswith("menu_")})
    return context
15,
6,
15,
15,
1445630709
] |
def write():
    """Print one JSON-formatted light/pressure reading to stdout."""
    try:
        pressure_reading = round(weather.pressure(), 2)
        light_reading = light.light()
        print('{"light": ' + str(light_reading) + ', "pressure": ' + str(pressure_reading) + ' }')
    except KeyboardInterrupt:
        # Allow a clean Ctrl-C during the sensor read.
        pass
pass | alexellis/docker-arm | [
874,
108,
874,
10,
1453021933
] |
def __init__(self, parent, modDir, modinfo, *args, **kwargs):
    """
    Build the mod-upload dialog: populate form fields from *modinfo*,
    apply theming and wire up the upload button.
    """
    BaseClass.__init__(self, *args, **kwargs)
    self.setupUi(self)
    self.parent = parent
    self.client = self.parent.client # type - ClientWindow
    self.modinfo = modinfo
    self.modDir = modDir
    # Re-apply styling whenever the theme is reloaded at runtime.
    util.THEME.stylesheets_reloaded.connect(self.load_stylesheet)
    self.load_stylesheet()
    self.setWindowTitle("Uploading Mod")
    self.Name.setText(modinfo.name)
    self.Version.setText(str(modinfo.version))
    if modinfo.ui_only:
        self.isUILabel.setText("is UI Only")
    else:
        self.isUILabel.setText("not UI Only")
    self.UID.setText(modinfo.uid)
    self.Description.setPlainText(modinfo.description)
    if modinfo.icon != "":
        self.IconURI.setText(utils.iconPathToFull(modinfo.icon))
        self.updateThumbnail()
    else:
        # No icon provided: show the generic placeholder image.
        self.Thumbnail.setPixmap(
            util.THEME.pixmap("games/unknown_map.png"),
        )
    self.UploadButton.pressed.connect(self.upload)
72,
87,
72,
101,
1411532757
] |
def upload(self):
    """
    Validate the mod name and icon location, zip the mod folder, then
    (eventually) upload it. Shows a message box and aborts on any problem.
    """
    n = self.Name.text()
    # Reject characters that are unsafe in file names / vault paths.
    if any([(i in n) for i in '"<*>|?/\\:']):
        QtWidgets.QMessageBox.information(
            self.client,
            "Invalid Name",
            "The mod name contains invalid characters: /\\<>|?:\"",
        )
        return
    iconpath = utils.iconPathToFull(self.modinfo.icon)
    infolder = False
    if (
        iconpath != ""
        and (
            os.path.commonprefix([
                os.path.normcase(self.modDir),
                os.path.normcase(iconpath),
            ])
            == os.path.normcase(self.modDir)
        )
    ):  # the icon is in the game folder
        # localpath = utils.fullPathToIcon(iconpath)
        infolder = True
    if iconpath != "" and not infolder:
        QtWidgets.QMessageBox.information(
            self.client,
            "Invalid Icon File",
            (
                "The file {} is not located inside the modfolder. Copy the"
                " icon file to your modfolder and change the mod_info.lua "
                "accordingly".format(iconpath)
            ),
        )
        return
    try:
        # Zip the whole mod directory into a temporary archive.
        temp = tempfile.NamedTemporaryFile(
            mode='w+b', suffix=".zip", delete=False,
        )
        zipped = zipfile.ZipFile(temp, "w", zipfile.ZIP_DEFLATED)
        zipdir(self.modDir, zipped, os.path.basename(self.modDir))
        zipped.close()
        temp.flush()
    except BaseException:
        QtWidgets.QMessageBox.critical(
            self.client,
            "Mod uploading error",
            "Something went wrong zipping the mod files.",
        )
        return
    # qfile = QtCore.QFile(temp.name)
    # TODO: implement uploading via API
    ...
72,
87,
72,
101,
1411532757
] |
def updateThumbnail(self):
    """
    Load the mod icon into the thumbnail widget, converting DDS files to
    PNG first. Returns True on success, False otherwise.
    """
    iconfilename = utils.iconPathToFull(self.modinfo.icon)
    if iconfilename == "":
        return False
    if os.path.splitext(iconfilename)[1].lower() == ".dds":
        # Qt cannot read DDS; convert to a PNG inside the mod folder.
        old = iconfilename
        iconfilename = os.path.join(
            self.modDir,
            os.path.splitext(os.path.basename(iconfilename))[0] + ".png",
        )
        succes = utils.generateThumbnail(old, iconfilename)
        if not succes:
            QtWidgets.QMessageBox.information(
                self.client,
                "Invalid Icon File",
                (
                    "Because FAF can't read DDS files, it tried to convert"
                    " it to a png. This failed. Try something else"
                ),
            )
            return False
    try:
        self.Thumbnail.setPixmap(util.THEME.pixmap(iconfilename, False))
    except BaseException:
        QtWidgets.QMessageBox.information(
            self.client,
            "Invalid Icon File",
            "This was not a valid icon file. Please pick a png or jpeg",
        )
        return False
    self.modinfo.thumbnail = utils.fullPathToIcon(iconfilename)
    self.IconURI.setText(iconfilename)
    return True
72,
87,
72,
101,
1411532757
] |
def getoffset(q):
    """Extract the byte offset stored in the high bits of a packed offset/type value."""
    shifted = q >> 16
    return int(shifted)
1,
3,
1,
1,
1417087373
] |
def offset_type(offset, type):
    # Pack a byte offset (high bits) and a 16-bit type/flags value (low bits)
    # into one value; inverse of getoffset()/gettype().
    # `long` => this module targets Python 2.
    return long(long(offset) << 16 | type)
1,
3,
1,
1,
1417087373
] |
def hash(text, p1, p2):
    """generate a hash from the given text and its parent hashes

    This hash combines both the current file contents and its history
    in a manner that makes it easy to distinguish nodes with the same
    content in the revision graph.
    """
    # As of now, if one of the parent node is null, p2 is null
    if p2 == nullid:
        # deep copy of a hash is faster than creating one
        digest = nullhash.copy()
        digest.update(p1)
    else:
        # none of the parent nodes are nullid; sort them so the result is
        # independent of parent order
        parents = sorted([p1, p2])
        digest = _sha(parents[0])
        digest.update(parents[1])
    digest.update(text)
    return digest.digest()
1,
3,
1,
1,
1417087373
] |
def decompress(bin):
    """ decompress the given input """
    if not bin:
        return bin
    marker = bin[0]
    if marker == '\0':
        # NUL marker: stored verbatim, marker included.
        return bin
    if marker == 'x':
        # zlib-compressed chunk.
        return _decompress(bin)
    if marker == 'u':
        # stored uncompressed behind a one-byte 'u' prefix.
        return bin[1:]
    raise RevlogError(_("unknown compression type %r") % marker)
1,
3,
1,
1,
1417087373
] |
def __init__(self):
    # Precompute the fixed byte size of one version-0 index entry.
    self.size = struct.calcsize(indexformatv0)
1,
3,
1,
1,
1417087373
] |
def packentry(self, entry, node, version, rev):
    """Serialize an in-memory index entry into the on-disk v0 format."""
    if gettype(entry[0]):
        # v0 cannot represent per-entry flags; those need RevlogNG.
        raise RevlogError(_("index entry flags need RevlogNG"))
    # v0 stores the raw offset plus parent/own nodes resolved via node().
    e2 = (getoffset(entry[0]), entry[1], entry[3], entry[4],
          node(entry[5]), node(entry[6]), entry[7])
    return _pack(indexformatv0, *e2)
1,
3,
1,
1,
1417087373
] |
def __init__(self):
    # Precompute the fixed byte size of one RevlogNG index entry.
    self.size = struct.calcsize(indexformatng)
1,
3,
1,
1,
1417087373
] |
def packentry(self, entry, node, version, rev):
    """Serialize an index entry into the RevlogNG on-disk format."""
    p = _pack(indexformatng, *entry)
    if rev == 0:
        # The first entry doubles as the file header: splice the version
        # word over its first 4 bytes.
        p = _pack(versionformat, version) + p[4:]
    return p
1,
3,
1,
1,
1417087373
] |
def __init__(self, opener, indexfile):
    """
    create a revlog object

    opener is a function that abstracts the file opening operation
    and can be used to implement COW semantics or the like.
    """
    self.indexfile = indexfile
    # The data file shares the index basename, with a ".d" extension.
    self.datafile = indexfile[:-2] + ".d"
    self.opener = opener
    self._cache = None
    self._basecache = (0, 0)
    self._chunkcache = (0, '')
    self.index = []
    self._pcache = {}
    self._nodecache = {nullid: nullrev}
    self._nodepos = None
    # Choose the revlog version: default unless the opener carries options.
    v = REVLOG_DEFAULT_VERSION
    opts = getattr(opener, 'options', None)
    if opts is not None:
        if 'revlogv1' in opts:
            if 'generaldelta' in opts:
                v |= REVLOGGENERALDELTA
        else:
            v = 0
    i = ''
    self._initempty = True
    try:
        f = self.opener(self.indexfile)
        i = f.read()
        f.close()
        if len(i) > 0:
            # An existing index overrides the default: its first 4 bytes
            # hold the version/flags word.
            v = struct.unpack(versionformat, i[:4])[0]
            self._initempty = False
    except IOError, inst:
        # A missing index file means a fresh revlog; anything else is fatal.
        if inst.errno != errno.ENOENT:
            raise
    self.version = v
    self._inline = v & REVLOGNGINLINEDATA
    self._generaldelta = v & REVLOGGENERALDELTA
    # Split the version word into flags (high bits) and format (low 16 bits).
    flags = v & ~0xFFFF
    fmt = v & 0xFFFF
    if fmt == REVLOGV0 and flags:
        raise RevlogError(_("index %s unknown flags %#04x for format v0")
                          % (self.indexfile, flags >> 16))
    elif fmt == REVLOGNG and flags & ~REVLOGNG_FLAGS:
        raise RevlogError(_("index %s unknown flags %#04x for revlogng")
                          % (self.indexfile, flags >> 16))
    elif fmt > REVLOGNG:
        raise RevlogError(_("index %s unknown format %d")
                          % (self.indexfile, fmt))
    # Pick the matching index reader/writer.
    self._io = revlogio()
    if self.version == REVLOGV0:
        self._io = revlogoldio()
    try:
        d = self._io.parseindex(i, self._inline)
    except (ValueError, IndexError):
        raise RevlogError(_("index %s is corrupted") % (self.indexfile))
    self.index, nodemap, self._chunkcache = d
    if nodemap is not None:
        # Parser supplied a prebuilt nodemap: reuse it as the node cache.
        self.nodemap = self._nodecache = nodemap
    if not self._chunkcache:
        self._chunkclear()
1,
3,
1,
1,
1417087373
] |
def __len__(self):
    """Return the number of revisions; the index carries one extra sentinel entry."""
    index_length = len(self.index)
    return index_length - 1
1,
3,
1,
1,
1417087373
] |
def nodemap(self):
    """Return the node -> revision mapping."""
    # NOTE(review): rev() appears to populate self._nodecache as a side
    # effect while resolving the first node — confirm before relying on it.
    self.rev(self.node(0))
    return self._nodecache
1,
3,
1,
1,
1417087373
] |
def clearcaches(self):
    """Drop cached node-lookup state."""
    try:
        # Native index objects expose their own cache-clearing hook.
        self._nodecache.clearcaches()
    except AttributeError:
        # Plain dict cache: reset it to the initial null mapping.
        self._nodecache = {nullid: nullrev}
        self._nodepos = None
1,
3,
1,
1,
1417087373
] |
def node(self, rev):
    """Return the node id stored at position *rev* of the index."""
    entry = self.index[rev]
    # The node id lives in slot 7 of each index entry tuple.
    return entry[7]
1,
3,
1,
1,
1417087373
] |
def parents(self, node):
    """Return the node ids of both parents of *node*."""
    index = self.index
    entry = index[self.rev(node)]
    # Entries store parent *revisions* in slots 5/6; map them back to nodes
    # via slot 7 of the parents' own entries.
    return index[entry[5]][7], index[entry[6]][7]
1,
3,
1,
1,
1417087373
] |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.