Columns:
function: string, lengths 11 to 56k
repo_name: string, lengths 5 to 60
features: sequence
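The three columns above repeat for every record below (function text, then repo_name, then features). A minimal sketch of one record loaded as a Python dict, assuming a row-per-record loader; the variable name and dict layout are illustrative, and the values are copied from the first record:

row = {
    "function": "def _eval(v): ...",           # source text, 11 to 56k characters (elided here)
    "repo_name": "biothings/biothings.api",    # 5 to 60 characters
    "features": [39, 25, 39, 80, 1452637246],  # integer sequence
}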
def _eval(v):
    try:
        match = _PY_REPR.match(v)
        if match:
            clsstr = match.group(1)
            modstr = clsstr.rsplit(".", 1)[0]
            return eval(v, {modstr: import_module(modstr)})
    except:
        ...
    return v
biothings/biothings.api
[ 39, 25, 39, 80, 1452637246 ]
def _get_jinja_environment():
    current_dir = os.path.dirname(__file__)
    templates_dir = os.path.join(current_dir, '../templates')
    return jinja2.Environment(loader=jinja2.FileSystemLoader(templates_dir),
                              extensions=['jinja2.ext.autoescape'],
                              autoescape=True)
m-lab/mlab-ns
[ 11, 14, 11, 30, 1431721186 ]
def send_no_content(request):
    request.response.headers['Access-Control-Allow-Origin'] = '*'
    request.response.headers['Content-Type'] = 'application/json'
    request.response.set_status(204)
m-lab/mlab-ns
[ 11, 14, 11, 30, 1431721186 ]
def send_server_error(request, output_type=message.FORMAT_HTML):
    request.error(500)
    if output_type == message.FORMAT_JSON:
        data = {}
        data['status_code'] = '500 Internal Server Error'
        json_data = json.dumps(data)
        request.response.headers['Content-Type'] = 'application/json'
        request.response.out.write(json_data)
    else:
        request.response.out.write(_get_jinja_template('not_found.html').render())
m-lab/mlab-ns
[ 11, 14, 11, 30, 1431721186 ]
def validate_rdm_string(ops: str) -> str:
    """Check that a string for rdms is valid.

    Args:
        ops (str): String expression to be computed.

    Returns (str):
        Either 'element' or 'tensor'.
    """
    qftops = ops.split()
    nops = len(qftops)
    assert (nops % 2) == 0

    if any(char.isdigit() for char in ops):
        creation = re.compile(r"^[0-9]+\^$")
        annihilation = re.compile(r"^[0-9]+$")
        ncre = 0
        nani = 0
        for opr in qftops:
            if creation.match(opr):
                ncre += 1
            elif annihilation.match(opr):
                nani += 1
            else:
                raise TypeError("Unsupported behavior for {}".format(ops))
        assert nani == ncre
        return "element"

    creation = re.compile(r"^[a-z]\^$")
    annihilation = re.compile(r"^[a-z]$")
    ncre = 0
    nani = 0
    for opr in qftops:
        if creation.match(opr):
            ncre += 1
        elif annihilation.match(opr):
            nani += 1
        else:
            raise TypeError("Unsupported behavior for {}.".format(ops))
    if nani != ncre:
        raise ValueError("Unequal creation and annihilation operators.")
    return "tensor"
quantumlib/OpenFermion-FQE
[ 42, 22, 42, 8, 1585772605 ]
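A brief usage sketch for validate_rdm_string above; the operator strings are invented for illustration, and the expected results follow only the regexes and counts visible in the function (it is assumed to be in scope together with its `re` import):

validate_rdm_string("0^ 1")   # digit labels, one creation and one annihilation -> "element"
validate_rdm_string("i^ j")   # letter labels, balanced                         -> "tensor"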
def sinebow(h):
  """A cyclic and uniform colormap, see http://basecase.org/env/on-rainbows."""
  f = lambda x: jnp.sin(jnp.pi * x)**2
  return jnp.stack([f(3 / 6 - h), f(5 / 6 - h), f(7 / 6 - h)], -1)
google/mipnerf
[ 653, 94, 653, 25, 1621008071 ]
def depth_to_normals(depth):
  """Assuming `depth` is orthographic, linearize it to a set of normals."""
  f_blur = jnp.array([1, 2, 1]) / 4
  f_edge = jnp.array([-1, 0, 1]) / 2
  dy = convolve2d(depth, f_blur[None, :] * f_edge[:, None])
  dx = convolve2d(depth, f_blur[:, None] * f_edge[None, :])
  inv_denom = 1 / jnp.sqrt(1 + dx**2 + dy**2)
  normals = jnp.stack([dx * inv_denom, dy * inv_denom, inv_denom], -1)
  return normals
google/mipnerf
[ 653, 94, 653, 25, 1621008071 ]
def visualize_normals(depth, acc, scaling=None):
  """Visualize fake normals of `depth` (optionally scaled to be isotropic)."""
  if scaling is None:
    mask = ~jnp.isnan(depth)
    x, y = jnp.meshgrid(
        jnp.arange(depth.shape[1]), jnp.arange(depth.shape[0]), indexing='xy')
    xy_var = (jnp.var(x[mask]) + jnp.var(y[mask])) / 2
    z_var = jnp.var(depth[mask])
    scaling = jnp.sqrt(xy_var / z_var)

  scaled_depth = scaling * depth
  normals = depth_to_normals(scaled_depth)
  vis = jnp.isnan(normals) + jnp.nan_to_num((normals + 1) / 2, 0)

  # Set non-accumulated pixels to white.
  if acc is not None:
    vis = vis * acc[:, :, None] + (1 - acc)[:, :, None]

  return vis
google/mipnerf
[ 653, 94, 653, 25, 1621008071 ]
def get(self): self.redirect('/ai/beat-blender/view/')
googlecreativelab/beat-blender
[ 178, 36, 178, 7, 1521683101 ]
def __init__(self): super(LinksPage, self).__init__(url='http://wargaming.net')
wgnet/webium
[ 152, 39, 152, 5, 1421401901 ]
def test_acquisition_multipoint_expected_improvement():
    """ Check if the q-EI acquisition function produces similar results as sampling """
    # Batch size
    k = 2
    # Set mean to one
    mu = np.ones((k))
    # Sample random 3 dimensional covariance matrix:
    L = np.tril(np.random.sample((k, k)))
    Sigma = L @ L.T
    # Set current minimum to a random number smaller than the mean:
    current_minimum = np.random.uniform()

    # Compute acquisition:
    qei_analytic, _, _ = MultipointExpectedImprovement(None)._get_acquisition(mu, Sigma, current_minimum)
    acq_fast = MultipointExpectedImprovement(None, fast_compute=True, eps=1e-3)
    qei_analytic_fast, _, _ = acq_fast._get_acquisition(mu, Sigma, current_minimum)

    # Reference with sampling
    N = 1000000
    samples = np.random.multivariate_normal(mu, Sigma, size=N)
    qei_sampled = current_minimum - np.min(samples, axis=1)
    qei_sampled = sum(qei_sampled[qei_sampled > 0]) / float(N)

    assert np.abs(qei_sampled - qei_analytic) < TOL
    assert np.abs(qei_analytic_fast - qei_analytic) < TOL
EmuKit/emukit
[ 491, 118, 491, 38, 1536037294 ]
def create_train_state(config: ml_collections.ConfigDict, rng: np.ndarray,
                       input_shape: Sequence[int],
                       num_classes: int) -> Tuple[Any, TrainState]:
  """Create and initialize the model.

  Args:
    config: Configuration for model.
    rng: JAX PRNG Key.
    input_shape: Shape of the inputs fed into the model.
    num_classes: Number of classes in the output layer.

  Returns:
    The initialized TrainState with the optimizer.
  """
  # Create model function.
  if config.model_name.startswith("resnet"):
    model_cls = resnet_v1.create_model(config.model_name, config)
  elif config.model_name.startswith("nest"):
    model_cls = nest_net.create_model(config.model_name, config)
  else:
    raise ValueError(f"Model {config.model_name} not supported.")
  model = functools.partial(model_cls, num_classes=num_classes)
  variables = model(train=False).init(rng, jnp.ones(input_shape))
  model_state = dict(variables)
  params = model_state.pop("params")
  parameter_overview.log_parameter_overview(params)
  if config.get("log_model_profile"):  # Be True or [1, 2]
    message_1 = utils.log_throughput(model, variables, input_shape)
    message_2 = utils.compute_flops(model, variables, [1] + list(input_shape[1:]))
    count = parameter_overview.count_parameters(params)
    message_3 = "Params: {:,}".format(count)
    message = ", ".join([message_1, message_2, message_3])
    logging.info("Profile results %s", message)
    if (isinstance(config.log_model_profile, (int,)) and
        config.log_model_profile >= 2):
      sys.exit(0)
  # Create optimizer.
  if config.optim in ("adamw", "adam"):
    if config.get("optim_wd_ignore"):
      # Allow zero weight decay for certain parameters listed in optim_wd_ignore
      igns = config.optim_wd_ignore
      p = flax.optim.ModelParamTraversal(
          lambda path, _: not any([i in path for i in igns]))
      p_nowd = flax.optim.ModelParamTraversal(
          lambda path, _: any([i in path for i in igns]))
      p_opt = flax.optim.Adam(weight_decay=config.weight_decay)
      p_nowd_opt = flax.optim.Adam(weight_decay=0)
      optimizer = flax.optim.MultiOptimizer((p, p_opt),
                                            (p_nowd, p_nowd_opt)).create(params)
    else:
      optimizer = flax.optim.Adam(
          weight_decay=config.weight_decay).create(params)
  elif config.optim == "sgd":
    optimizer = flax.optim.Momentum(
        beta=config.sgd_momentum, nesterov=True).create(params)
  else:
    raise NotImplementedError(f"{config.optim} does not exist.")

  return model, TrainState(step=0, optimizer=optimizer, model_state=model_state)
google-research/nested-transformer
[ 172, 23, 172, 4, 1622524188 ]
def train_step( model: Any, state: TrainState, batch: Dict[str, jnp.ndarray], rng: np.ndarray, learning_rate_fn: Callable[[int], float], weight_decay: float, grad_clip_max_norm: Optional[float] = None
google-research/nested-transformer
[ 172, 23, 172, 4, 1622524188 ]
def loss_fn(params):
  variables = {"params": params}
  variables.update(state.model_state)
  logits, new_model_state = model(train=True).apply(
      variables, batch["image"], mutable=["batch_stats"], rngs={"dropout": rng})
  loss = jnp.mean(
      losses.softmax_cross_entropy_loss(logits=logits, labels=batch["label"]))
  if weight_decay > 0:
    weight_penalty_params = jax.tree_leaves(variables["params"])
    weight_l2 = sum(
        [jnp.sum(x**2) for x in weight_penalty_params if x.ndim > 1])
    weight_penalty = weight_decay * 0.5 * weight_l2
    loss = loss + weight_penalty
  new_model_state = dict(new_model_state)
  return loss, (new_model_state, logits)
google-research/nested-transformer
[ 172, 23, 172, 4, 1622524188 ]
def eval_step(model: Any, state: TrainState,
              batch: Dict[str, jnp.ndarray]) -> metrics.Collection:
  """Compute the metrics for the given model in inference mode.

  The model is applied to the inputs with train=False using all devices on the
  host. Afterwards metrics are averaged across *all* devices (of all hosts).

  Args:
    model: Flax module for the model. The apply method must take input images
      and a boolean argument indicating whether to use training or inference
      mode.
    state: Replicate model state.
    batch: Inputs that should be evaluated.

  Returns:
    Dictionary of the replicated metrics.
  """
  logging.info("eval_step(batch=%s)", batch)
  variables = {
      "params": state.optimizer.target,
  }
  variables.update(state.model_state)
  logits = model(train=False).apply(variables, batch["image"], mutable=False)
  loss = jnp.mean(
      losses.cross_entropy_loss(logits=logits, labels=batch["label"]))
  return EvalMetrics.gather_from_model_output(
      logits=logits,
      labels=batch["label"],
      loss=loss,
      mask=batch.get("mask"),
  )
google-research/nested-transformer
[ 172, 23, 172, 4, 1622524188 ]
def __init__(self, name: str, init_step_num: int):
  self.name = name
  self.step_num = init_step_num
google-research/nested-transformer
[ 172, 23, 172, 4, 1622524188 ]
def __exit__(self, exc_type, exc_value, tb):
  self.context.__exit__(exc_type, exc_value, tb)
  self.context = None
google-research/nested-transformer
[ 172, 23, 172, 4, 1622524188 ]
def evaluate(model: nn.Module,
             state: TrainState,
             eval_ds: tf.data.Dataset,
             num_eval_steps: int = -1) -> Union[None, EvalMetrics]:
  """Evaluate the model on the given dataset."""
  logging.info("Starting evaluation.")
  eval_metrics = None
  with StepTraceContextHelper("eval", 0) as trace_context:
    for step, batch in enumerate(eval_ds):  # pytype: disable=wrong-arg-types
      batch = jax.tree_map(np.asarray, batch)
      metrics_update = flax_utils.unreplicate(eval_step(model, state, batch))
      eval_metrics = (
          metrics_update
          if eval_metrics is None else eval_metrics.merge(metrics_update))
      if num_eval_steps > 0 and step + 1 == num_eval_steps:
        break
      trace_context.next_step()
  return eval_metrics
google-research/nested-transformer
[ 172, 23, 172, 4, 1622524188 ]
def get_coord(exifdict):
    '''
    Purpose:        The purpose of this script is to extract the Latitude and Longitude from the EXIF data
    Inputs:         exifdict: structure storing the image's EXIF data.
    Outputs:        coords: A tuple of the Latitude and Longitude in Decimal form
    Returns:        (lat,lon)
    Assumptions:    The EXIF data is valid.
    '''
    values = exifdict['gps gpslatitude'][1:-1].split(", ")
    s = values[2]
    df = float(values[0])
    mf = float(values[1])
    smath = s.split("/")
    sf = float(smath[0])/float(smath[1])
    lat = df + mf/60 + sf/3600
    if exifdict['gps gpslatituderef'] == 'S':
        lat = lat*(-1)
    values = exifdict['gps gpslongitude'][1:-1].split(", ")
    s = values[2]
    df = float(values[0])
    mf = float(values[1])
    smath = s.split("/")
    sf = float(smath[0])/float(smath[1])
    lon = df + mf/60 + sf/3600
    if exifdict['gps gpslongituderef'] == 'W':
        lon = lon*(-1)
    return (lat,lon)
Aerolyzer/Aerolyzer
[ 7, 11, 7, 9, 1476309407 ]
def zip_to_coord(zipcode,googlegeokey):
    '''
    Purpose:        The purpose of this script is to convert ZIP Code to a Latitude and Longitude
    Inputs:         zipcode: 5 digit long ZIP code.
    Outputs:        coord: tuple holding latitude and longitude
    Returns:        (lat,lon)
    Assumptions:    The EXIF data is valid.
    '''
    try:
        url = 'https://maps.googleapis.com/maps/api/geocode/json?address='+zipcode+'&key='+googlegeokey
        c = urllib2.urlopen(url)
        results = c.read()
        parsedResults = json.loads(results)
        lat = float(parsedResults['results'][0]['geometry']['location']['lat'])
        lon = float(parsedResults['results'][0]['geometry']['location']['lng'])
    except Exception:
        print "Unable to retrieve data: ", sys.exc_info()[0]
        (lat,lon) = (0.0,0.0)
    finally:
        return (lat,lon)
Aerolyzer/Aerolyzer
[ 7, 11, 7, 9, 1476309407 ]
def __init__(self, state): self.state = state
hycis/Pynet
[ 8, 2, 8, 1, 1402708844 ]
def get_random_graph() -> jraph.GraphsTuple:
  return jraph.GraphsTuple(
      n_node=np.asarray([NUM_NODES]),
      n_edge=np.asarray([NUM_EDGES]),
      nodes=np.random.normal(size=[NUM_NODES, EMBEDDING_SIZE]),
      edges=np.random.normal(size=[NUM_EDGES, EMBEDDING_SIZE]),
      globals=None,
      senders=np.random.randint(0, NUM_NODES, [NUM_EDGES]),
      receivers=np.random.randint(0, NUM_NODES, [NUM_EDGES]))
deepmind/jraph
[ 1114, 71, 1114, 9, 1606127232 ]
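A small sketch of calling get_random_graph above and inspecting the output shapes. NUM_NODES, NUM_EDGES and EMBEDDING_SIZE are module constants not shown in this record, so the values below are assumptions for illustration, and the function plus its numpy/jraph imports are assumed to be in scope:

NUM_NODES, NUM_EDGES, EMBEDDING_SIZE = 16, 32, 8  # assumed values
g = get_random_graph()
print(g.nodes.shape)    # (16, 8)
print(g.senders.shape)  # (32,)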
def update_edge_fn(edges, sender_nodes, receiver_nodes):
  # We will run an LSTM memory on the inputs first, and then
  # process the output of the LSTM with an MLP.
  edge_inputs = jnp.concatenate([edges.embedding,
                                 sender_nodes.embedding,
                                 receiver_nodes.embedding], axis=-1)
  lstm_output, updated_state = edge_fn_lstm(edge_inputs, edges.state)
  updated_edges = StatefulField(
      embedding=edge_fn_mlp(lstm_output),
      state=updated_state,
  )
  return updated_edges
deepmind/jraph
[ 1114, 71, 1114, 9, 1606127232 ]
def main(_):
  network = hk.without_apply_rng(hk.transform(network_definition))
  input_graph = get_random_graph()
  params = network.init(jax.random.PRNGKey(42), input_graph)
  output_graph = network.apply(params, input_graph)
  print(tree.tree_map(lambda x: x.shape, output_graph))
deepmind/jraph
[ 1114, 71, 1114, 9, 1606127232 ]
def setUpClass(cls):
    super(QuotasAdminNegativeTestJSON, cls).setUpClass()
    cls.client = cls.os.quotas_client
    cls.adm_client = cls.os_adm.quotas_client
    cls.sg_client = cls.security_groups_client
    # NOTE(afazekas): these test cases should always create and use a new
    # tenant most of them should be skipped if we can't do that
    cls.demo_tenant_id = cls.client.tenant_id
Mirantis/tempest
[ 2, 7, 2, 1, 1327963146 ]
def test_update_quota_normal_user(self): self.assertRaises(exceptions.Unauthorized, self.client.update_quota_set, self.demo_tenant_id, ram=0)
Mirantis/tempest
[ 2, 7, 2, 1, 1327963146 ]
def test_create_server_when_cpu_quota_is_full(self):
    # Disallow server creation when tenant's vcpu quota is full
    resp, quota_set = self.adm_client.get_quota_set(self.demo_tenant_id)
    default_vcpu_quota = quota_set['cores']
    vcpu_quota = 0  # Set the quota to zero to conserve resources

    resp, quota_set = self.adm_client.update_quota_set(self.demo_tenant_id,
                                                       force=True,
                                                       cores=vcpu_quota)
    self.addCleanup(self.adm_client.update_quota_set, self.demo_tenant_id,
                    cores=default_vcpu_quota)
    self.assertRaises(exceptions.Unauthorized, self.create_test_server)
Mirantis/tempest
[ 2, 7, 2, 1, 1327963146 ]
def test_create_server_when_memory_quota_is_full(self):
    # Disallow server creation when tenant's memory quota is full
    resp, quota_set = self.adm_client.get_quota_set(self.demo_tenant_id)
    default_mem_quota = quota_set['ram']
    mem_quota = 0  # Set the quota to zero to conserve resources

    self.adm_client.update_quota_set(self.demo_tenant_id,
                                     force=True,
                                     ram=mem_quota)
    self.addCleanup(self.adm_client.update_quota_set, self.demo_tenant_id,
                    ram=default_mem_quota)
    self.assertRaises(exceptions.Unauthorized, self.create_test_server)
Mirantis/tempest
[ 2, 7, 2, 1, 1327963146 ]
def test_create_server_when_instances_quota_is_full(self):
    # Once instances quota limit is reached, disallow server creation
    resp, quota_set = self.adm_client.get_quota_set(self.demo_tenant_id)
    default_instances_quota = quota_set['instances']
    instances_quota = 0  # Set quota to zero to disallow server creation

    self.adm_client.update_quota_set(self.demo_tenant_id,
                                     force=True,
                                     instances=instances_quota)
    self.addCleanup(self.adm_client.update_quota_set, self.demo_tenant_id,
                    instances=default_instances_quota)
    self.assertRaises(exceptions.Unauthorized, self.create_test_server)
Mirantis/tempest
[ 2, 7, 2, 1, 1327963146 ]
def test_security_groups_exceed_limit(self):
    # Negative test: Creation Security Groups over limit should FAIL
    resp, quota_set = self.adm_client.get_quota_set(self.demo_tenant_id)
    default_sg_quota = quota_set['security_groups']
    sg_quota = 0  # Set the quota to zero to conserve resources

    resp, quota_set =\
        self.adm_client.update_quota_set(self.demo_tenant_id,
                                         force=True,
                                         security_groups=sg_quota)
    self.addCleanup(self.adm_client.update_quota_set,
                    self.demo_tenant_id,
                    security_groups=default_sg_quota)

    # Check we cannot create anymore
    # A 403 Forbidden or 413 Overlimit (old behaviour) exception
    # will be raised when out of quota
    self.assertRaises((exceptions.Unauthorized, exceptions.OverLimit),
                      self.sg_client.create_security_group,
                      "sg-overlimit", "sg-desc")
Mirantis/tempest
[ 2, 7, 2, 1, 1327963146 ]
def test_security_groups_rules_exceed_limit(self):
    # Negative test: Creation of Security Group Rules should FAIL
    # when we reach limit maxSecurityGroupRules
    resp, quota_set = self.adm_client.get_quota_set(self.demo_tenant_id)
    default_sg_rules_quota = quota_set['security_group_rules']
    sg_rules_quota = 0  # Set the quota to zero to conserve resources

    resp, quota_set =\
        self.adm_client.update_quota_set(
            self.demo_tenant_id,
            force=True,
            security_group_rules=sg_rules_quota)
    self.addCleanup(self.adm_client.update_quota_set,
                    self.demo_tenant_id,
                    security_group_rules=default_sg_rules_quota)

    s_name = data_utils.rand_name('securitygroup-')
    s_description = data_utils.rand_name('description-')
    resp, securitygroup =\
        self.sg_client.create_security_group(s_name, s_description)
    self.addCleanup(self.sg_client.delete_security_group,
                    securitygroup['id'])

    secgroup_id = securitygroup['id']
    ip_protocol = 'tcp'

    # Check we cannot create SG rule anymore
    # A 403 Forbidden or 413 Overlimit (old behaviour) exception
    # will be raised when out of quota
    self.assertRaises((exceptions.OverLimit, exceptions.Unauthorized),
                      self.sg_client.create_security_group_rule,
                      secgroup_id, ip_protocol, 1025, 1025)
Mirantis/tempest
[ 2, 7, 2, 1, 1327963146 ]
def get_option(option: str) -> str:
    try:
        return _options[option]
    except KeyError:
        return None
KarlGong/ptest
[ 56, 9, 56, 1, 1431668612 ]
def get_int_property(key: str, default: int = None) -> int:
    """
        Get property value and convert it to int.
        If no property found, default value will be returned.
    """
    try:
        return int(_properties[key])
    except KeyError:
        return default
KarlGong/ptest
[ 56, 9, 56, 1, 1431668612 ]
def get_boolean_property(key: str, default: bool = None) -> bool:
    """
        Get property value and convert it to boolean.
        If no property found, default value will be returned.
    """
    try:
        value = _properties[key]
        if value.lower() == "true":
            return True
        elif value.lower() == "false":
            return False
        raise ValueError("could not convert string to boolean: %s" % value)
    except KeyError:
        return default
KarlGong/ptest
[ 56, 9, 56, 1, 1431668612 ]
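A short sketch of get_boolean_property's behaviour; the contents of _properties and the key names here are made up, and the function is assumed to be in scope:

_properties = {"verbose": "True"}        # hypothetical property store
get_boolean_property("verbose")          # -> True
get_boolean_property("missing", False)   # -> False (KeyError falls back to the default)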
def load(args):
    option_args, property_args = __load_args(args)
    _parse_options(option_args)
    _load_properties_from_file()
    _parse_properties(property_args)
KarlGong/ptest
[ 56, 9, 56, 1, 1431668612 ]
def __load_args(args):
    property_args = []
    option_args = []
    property_regex_str = r"^-D(.*?)=(.*?)$"  # the format of property definition must be -D<key>=<value>
    property_regex = re.compile(property_regex_str)
    for arg in args:
        property_match = property_regex.search(arg)
        if property_match:
            property_args.append(arg)
        else:
            option_args.append(arg)
    return option_args, property_args
KarlGong/ptest
[ 56, 9, 56, 1, 1431668612 ]
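A sketch of how __load_args splits a mixed argument list; the argument values are invented, and the split follows only the -D<key>=<value> regex shown in the function, which is assumed to be in scope:

args = ["-v", "-Dbrowser=chrome", "--workspace", ".", "-Dtimeout=30"]
option_args, property_args = __load_args(args)
# option_args   -> ["-v", "--workspace", "."]
# property_args -> ["-Dbrowser=chrome", "-Dtimeout=30"]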
def train_step(model, rng, state, batch, lr):
  """One optimization step.

  Args:
    model: The linen model.
    rng: jnp.ndarray, random number generator.
    state: utils.TrainState, state of the model/optimizer.
    batch: dict, a mini-batch of data for training.
    lr: float, real-time learning rate.

  Returns:
    new_state: utils.TrainState, new training state.
    stats: list. [(loss, psnr), (loss_coarse, psnr_coarse)].
    rng: jnp.ndarray, updated random number generator.
  """
  rng, key_0, key_1 = random.split(rng, 3)

  def loss_fn(variables):
    rays = batch["rays"]
    ret = model.apply(variables, key_0, key_1, rays, FLAGS.randomized)
    if len(ret) not in (1, 2):
      raise ValueError(
          "ret should contain either 1 set of output (coarse only), or 2 sets"
          "of output (coarse as ret[0] and fine as ret[1]).")
    # The main prediction is always at the end of the ret list.
    rgb, _, _, sigma, _, _ = ret[-1]
    loss = ((rgb - batch["pixels"][Ellipsis, :3])**2).mean()
    psnr = utils.compute_psnr(loss)
    if len(ret) > 1:
      # If there are both coarse and fine predictions, we compute the loss for
      # the coarse prediction (ret[0]) as well.
      rgb_c, _, _, sigma_c, _, _ = ret[0]
      loss_c = ((rgb_c - batch["pixels"][Ellipsis, :3])**2).mean()
      psnr_c = utils.compute_psnr(loss_c)
      sparsity_c = FLAGS.sparsity_strength * jax.numpy.log(1.0 + sigma_c**2 / 0.5).mean()
    else:
      loss_c = 0.
      psnr_c = 0.
      sparsity_c = 0.0

    def tree_sum_fn(fn):
      return jax.tree_util.tree_reduce(
          lambda x, y: x + fn(y), variables, initializer=0)

    weight_l2 = (
        tree_sum_fn(lambda z: jnp.sum(z**2)) /
        tree_sum_fn(lambda z: jnp.prod(jnp.array(z.shape))))
    sparsity = FLAGS.sparsity_strength * jax.numpy.log(1.0 + sigma**2 / 0.5).mean()

    stats = utils.Stats(
        loss=loss,
        psnr=psnr,
        loss_c=loss_c,
        psnr_c=psnr_c,
        weight_l2=weight_l2,
        sparsity=sparsity,
        sparsity_c=sparsity_c)
    return (loss + loss_c + FLAGS.weight_decay_mult * weight_l2 + sparsity +
            sparsity_c), stats

  (_, stats), grad = (
      jax.value_and_grad(loss_fn, has_aux=True)(state.optimizer.target))
  grad = jax.lax.pmean(grad, axis_name="batch")
  stats = jax.lax.pmean(stats, axis_name="batch")

  # Clip the gradient by value.
  if FLAGS.grad_max_val > 0:
    clip_fn = lambda z: jnp.clip(z, -FLAGS.grad_max_val, FLAGS.grad_max_val)
    grad = jax.tree_util.tree_map(clip_fn, grad)

  # Clip the (possibly value-clipped) gradient by norm.
  if FLAGS.grad_max_norm > 0:
    grad_norm = jnp.sqrt(
        jax.tree_util.tree_reduce(
            lambda x, y: x + jnp.sum(y**2), grad, initializer=0))
    mult = jnp.minimum(1, FLAGS.grad_max_norm / (1e-7 + grad_norm))
    grad = jax.tree_util.tree_map(lambda z: mult * z, grad)

  new_optimizer = state.optimizer.apply_gradient(grad, learning_rate=lr)
  new_state = state.replace(optimizer=new_optimizer)
  return new_state, stats, rng
google-research/google-research
[ 27788, 6881, 27788, 944, 1538678568 ]
def render_fn(variables, key_0, key_1, rays): return jax.lax.all_gather( model.apply(variables, key_0, key_1, rays, FLAGS.randomized), axis_name="batch")
google-research/google-research
[ 27788, 6881, 27788, 944, 1538678568 ]
def fetch(api_key, api_secret):
    # create an unauthenticated flickrapi object
    flickr=flickrapi.FlickrAPI(api_key, api_secret)

    print "Open the following URL in your browser "
    print "This Url >>>> %s" % flickr.web_login_url(perms='read')
    print "When you're ready press ENTER",
    raw_input()
    print "Copy and paste the URL (from theopenphotoproject.org) here: ",
    frob_url = raw_input()
    print "\nThanks!"

    print "Parsing URL for the token...",
    match = re.search('frob=([^&]+)', frob_url)
    frob = match.group(1)
    token = flickr.get_token(frob)
    print "OK"

    # create an authenticated flickrapi object
    flickr = flickrapi.FlickrAPI(api_key, api_secret, token=token)

    # now we get the authenticated user's id
    print "Fetching user id...",
    user_resp = flickr.urls_getUserProfile()
    user_fields = user_resp.findall('user')[0]
    user_id = user_fields.get('nsid')
    print "OK"
photo/export-flickr
[ 32, 6, 32, 3, 1328069987 ]
def createDirectorySafe( name ):
    if not os.path.exists(name):
        os.makedirs(name)
photo/export-flickr
[ 32, 6, 32, 3, 1328069987 ]
def constructUrl( photo ): return "http://farm%s.staticflickr.com/%s/%s_%s_o.%s" % (photo.get('farm'), photo.get('server'), photo.get('id'), photo.get('originalsecret'), photo.get('originalformat'))
photo/export-flickr
[ 32, 6, 32, 3, 1328069987 ]
def getLicense( num ):
    licenses = {}
    licenses['0'] = ''
    licenses['4'] = 'CC BY'
    licenses['5'] = 'CC BY-SA'
    licenses['6'] = 'CC BY-ND'
    licenses['2'] = 'CC BY-NC'
    licenses['1'] = 'CC BY-NC-SA'
    licenses['3'] = 'CC BY-NC-ND'
    # Fall back to the empty license string for unknown license ids
    # (a plain licenses[num] lookup would raise KeyError instead).
    if num not in licenses:
        return licenses['0']
    else:
        return licenses[num]
photo/export-flickr
[ 32, 6, 32, 3, 1328069987 ]
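A tiny usage sketch for getLicense; the ids follow the numeric Flickr license codes listed in the function, and the unknown-id fallback reflects the corrected lookup above:

getLicense('4')   # -> 'CC BY'
getLicense('7')   # -> '' (unknown id falls back to the empty license string)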
def __init__(self, config) :
    name = config['name'] if 'name' in config else 'esp8266'
    host = config['host']
    port = config['port'] if 'port' in config else 0
    self.retries = config['retries'] if 'retries' in config else 1
    self.retain = config['retain'] if 'retain' in config else False
    self.qos = config['qos'] if 'qos' in config else 0
    self.topic = config['topic'] if 'topic' in config else 'log'
    self.client = mqtt.simple.MQTTClient(client_id=name, server=host, port=port)
fadushin/esp8266
[ 70, 22, 70, 11, 1469665174 ]
def __init__(self): vstruct.VStruct.__init__(self) self.ExecuteDisable = v_uint8()
atlas0fd00m/CanCat
[ 169, 34, 169, 6, 1428517921 ]
def __init__(self): vstruct.VStruct.__init__(self) self.Size = v_uint32() self.ThreadPriority = v_uint32() self.PagePriority = v_uint32() self.IoPriority = v_uint32()
atlas0fd00m/CanCat
[ 169, 34, 169, 6, 1428517921 ]
def __init__(self): vstruct.VStruct.__init__(self) self.Revision = v_uint8() self.SubAuthorityCount = v_uint8() self.IdentifierAuthority = SID_IDENTIFIER_AUTHORITY() self.SubAuthority = vstruct.VArray([ v_uint32() for i in xrange(1) ])
atlas0fd00m/CanCat
[ 169, 34, 169, 6, 1428517921 ]
def __init__(self): vstruct.VStruct.__init__(self) self.NtTib = NT_TIB32() self.EnvironmentPointer = v_uint32() self.ClientId = CLIENT_ID32() self.ActiveRpcHandle = v_uint32() self.ThreadLocalStoragePointer = v_uint32() self.ProcessEnvironmentBlock = v_uint32() self.LastErrorValue = v_uint32() self.CountOfOwnedCriticalSections = v_uint32() self.CsrClientThread = v_uint32() self.Win32ThreadInfo = v_uint32() self.User32Reserved = vstruct.VArray([ v_uint32() for i in xrange(26) ]) self.UserReserved = vstruct.VArray([ v_uint32() for i in xrange(5) ]) self.WOW32Reserved = v_uint32() self.CurrentLocale = v_uint32() self.FpSoftwareStatusRegister = v_uint32() self.SystemReserved1 = vstruct.VArray([ v_uint32() for i in xrange(54) ]) self.ExceptionCode = v_uint32() self.ActivationContextStackPointer = v_uint32() self.SpareBytes = vstruct.VArray([ v_uint8() for i in xrange(36) ]) self.TxFsContext = v_uint32() self.GdiTebBatch = GDI_TEB_BATCH32() self.RealClientId = CLIENT_ID32() self.GdiCachedProcessHandle = v_uint32() self.GdiClientPID = v_uint32() self.GdiClientTID = v_uint32() self.GdiThreadLocalInfo = v_uint32() self.Win32ClientInfo = vstruct.VArray([ v_uint32() for i in xrange(62) ]) self.glDispatchTable = vstruct.VArray([ v_uint32() for i in xrange(233) ]) self.glReserved1 = vstruct.VArray([ v_uint32() for i in xrange(29) ]) self.glReserved2 = v_uint32() self.glSectionInfo = v_uint32() self.glSection = v_uint32() self.glTable = v_uint32() self.glCurrentRC = v_uint32() self.glContext = v_uint32() self.LastStatusValue = v_uint32() self.StaticUnicodeString = STRING32() self.StaticUnicodeBuffer = vstruct.VArray([ v_uint16() for i in xrange(261) ]) self._pad0e0c = v_bytes(size=2) self.DeallocationStack = v_uint32() self.TlsSlots = vstruct.VArray([ v_uint32() for i in xrange(64) ]) self.TlsLinks = LIST_ENTRY32() self.Vdm = v_uint32() self.ReservedForNtRpc = v_uint32() self.DbgSsReserved = vstruct.VArray([ v_uint32() for i in xrange(2) ]) self.HardErrorMode = v_uint32() self.Instrumentation = vstruct.VArray([ v_uint32() for i in xrange(9) ]) self.ActivityId = GUID() self.SubProcessTag = v_uint32() self.PerflibData = v_uint32() self.EtwTraceData = v_uint32() self.WinSockData = v_uint32() self.GdiBatchCount = v_uint32() self.CurrentIdealProcessor = PROCESSOR_NUMBER() self.GuaranteedStackBytes = v_uint32() self.ReservedForPerf = v_uint32() self.ReservedForOle = v_uint32() self.WaitingOnLoaderLock = v_uint32() self.SavedPriorityState = v_uint32() self.ReservedForCodeCoverage = v_uint32() self.ThreadPoolData = v_uint32() self.TlsExpansionSlots = v_uint32() self.MuiGeneration = v_uint32() self.IsImpersonating = v_uint32() self.NlsCache = v_uint32() self.pShimData = v_uint32() self.HeapVirtualAffinity = v_uint16() self.LowFragHeapDataSlot = v_uint16() self.CurrentTransactionHandle = v_uint32() self.ActiveFrame = v_uint32() self.FlsData = v_uint32() self.PreferredLanguages = v_uint32() self.UserPrefLanguages = v_uint32() self.MergedPrefLanguages = v_uint32() self.MuiImpersonation = v_uint32() self.CrossTebFlags = v_uint16() self.SameTebFlags = v_uint16() self.TxnScopeEnterCallback = v_uint32() self.TxnScopeExitCallback = v_uint32() self.TxnScopeContext = v_uint32() self.LockCount = v_uint32() self.SpareUlong0 = v_uint32() self.ResourceRetValue = v_uint32() self.ReservedForWdf = v_uint32()
atlas0fd00m/CanCat
[ 169, 34, 169, 6, 1428517921 ]
def __init__(self): vstruct.VStruct.__init__(self) self.Signature = v_uint32() self.Version = v_uint32() self.Length = v_uint32() self.Flags = WHEA_ERROR_PACKET_FLAGS() self.ErrorType = v_uint32() self.ErrorSeverity = v_uint32() self.ErrorSourceId = v_uint32() self.ErrorSourceType = v_uint32() self.NotifyType = GUID() self.Context = v_uint64() self.DataFormat = v_uint32() self.Reserved1 = v_uint32() self.DataOffset = v_uint32() self.DataLength = v_uint32() self.PshedDataOffset = v_uint32() self.PshedDataLength = v_uint32()
atlas0fd00m/CanCat
[ 169, 34, 169, 6, 1428517921 ]
def __init__(self): vstruct.VStruct.__init__(self) self.Mask = v_uint32() self.Group = v_uint16() self.Reserved = vstruct.VArray([ v_uint16() for i in xrange(3) ])
atlas0fd00m/CanCat
[ 169, 34, 169, 6, 1428517921 ]
def __init__(self): vstruct.VStruct.__init__(self) self.Backlink = v_uint16() self.Reserved0 = v_uint16() self.Esp0 = v_uint32() self.Ss0 = v_uint16() self.Reserved1 = v_uint16() self.NotUsed1 = vstruct.VArray([ v_uint32() for i in xrange(4) ]) self.CR3 = v_uint32() self.Eip = v_uint32() self.EFlags = v_uint32() self.Eax = v_uint32() self.Ecx = v_uint32() self.Edx = v_uint32() self.Ebx = v_uint32() self.Esp = v_uint32() self.Ebp = v_uint32() self.Esi = v_uint32() self.Edi = v_uint32() self.Es = v_uint16() self.Reserved2 = v_uint16() self.Cs = v_uint16() self.Reserved3 = v_uint16() self.Ss = v_uint16() self.Reserved4 = v_uint16() self.Ds = v_uint16() self.Reserved5 = v_uint16() self.Fs = v_uint16() self.Reserved6 = v_uint16() self.Gs = v_uint16() self.Reserved7 = v_uint16() self.LDT = v_uint16() self.Reserved8 = v_uint16() self.Flags = v_uint16() self.IoMapBase = v_uint16() self.IoMaps = vstruct.VArray([ KiIoAccessMap() for i in xrange(1) ]) self.IntDirectionMap = vstruct.VArray([ v_uint8() for i in xrange(32) ])
atlas0fd00m/CanCat
[ 169, 34, 169, 6, 1428517921 ]
def __init__(self): vstruct.VStruct.__init__(self) self.DosPath = UNICODE_STRING() self.Handle = v_ptr32()
atlas0fd00m/CanCat
[ 169, 34, 169, 6, 1428517921 ]
def __init__(self): vstruct.VStruct.__init__(self) self.TreeNode = RTL_BALANCED_NODE() self.ThreadUnsafe = v_ptr32() self.LockState = KLOCK_ENTRY_LOCK_STATE() self.OwnerTree = RTL_RB_TREE() self.WaiterTree = RTL_RB_TREE() self.EntryLock = v_uint32() self.AllBoosts = v_uint16() self.IoNormalPriorityWaiterCount = v_uint16()
atlas0fd00m/CanCat
[ 169, 34, 169, 6, 1428517921 ]
def __init__(self): vstruct.VStruct.__init__(self) self.Masks = vstruct.VArray([ v_uint32() for i in xrange(8) ])
atlas0fd00m/CanCat
[ 169, 34, 169, 6, 1428517921 ]
def __init__(self): vstruct.VStruct.__init__(self) self.AuditMask = v_uint32()
atlas0fd00m/CanCat
[ 169, 34, 169, 6, 1428517921 ]
def __init__(self): vstruct.VStruct.__init__(self) self.Value = v_uint16() self.Type = v_uint8() self.HardCap = v_uint8() self.RelativeWeight = v_uint32() self.QueryHistoryTimeStamp = v_uint64() self.NotificationCycles = v_uint64() self.SchedulingGroupList = LIST_ENTRY() self.NotificationDpc = v_ptr32() self._pad0040 = v_bytes(size=28) self.PerProcessor = vstruct.VArray([ KSCB() for i in xrange(1) ]) self._pad0140 = v_bytes(size=32)
atlas0fd00m/CanCat
[ 169, 34, 169, 6, 1428517921 ]
def __init__(self): vstruct.VStruct.__init__(self) self.Length64 = v_uint32() self.Alignment64 = v_uint32() self.MinimumAddress = LARGE_INTEGER() self.MaximumAddress = LARGE_INTEGER()
atlas0fd00m/CanCat
[ 169, 34, 169, 6, 1428517921 ]
def __init__(self): vstruct.VStruct.__init__(self) self.SectionOffset = v_uint32() self.SectionLength = v_uint32() self.Revision = WHEA_REVISION() self.ValidBits = WHEA_ERROR_RECORD_SECTION_DESCRIPTOR_VALIDBITS() self.Reserved = v_uint8() self.Flags = WHEA_ERROR_RECORD_SECTION_DESCRIPTOR_FLAGS() self.SectionType = GUID() self.FRUId = GUID() self.SectionSeverity = v_uint32() self.FRUText = vstruct.VArray([ v_uint8() for i in xrange(20) ])
atlas0fd00m/CanCat
[ 169, 34, 169, 6, 1428517921 ]
def __init__(self): vstruct.VStruct.__init__(self) self.IdType = v_uint32()
atlas0fd00m/CanCat
[ 169, 34, 169, 6, 1428517921 ]
def __init__(self): vstruct.VStruct.__init__(self) self.DeviceTextType = v_uint32() self.LocaleId = v_uint32()
atlas0fd00m/CanCat
[ 169, 34, 169, 6, 1428517921 ]
def __init__(self): vstruct.VStruct.__init__(self) self.PromotePercent = v_uint8() self.DemotePercent = v_uint8() self.PromotePercentBase = v_uint8() self.DemotePercentBase = v_uint8() self.AllowScaling = v_uint8()
atlas0fd00m/CanCat
[ 169, 34, 169, 6, 1428517921 ]
def __init__(self): vstruct.VStruct.__init__(self) self.Magic = v_uint32() self.Database = v_ptr32() self.NextSegment = v_ptr32() self.TotalSize = v_uint32() self.SegmentStart = v_ptr32() self.SegmentEnd = v_ptr32() self.SegmentFree = v_ptr32()
atlas0fd00m/CanCat
[ 169, 34, 169, 6, 1428517921 ]
def __init__(self): vstruct.VStruct.__init__(self) self.InstantaneousRead = v_ptr32() self._pad0008 = v_bytes(size=4) self.LastActualCount = v_uint64() self.LastReferenceCount = v_uint64() self.CachedValue = v_uint32() self._pad0020 = v_bytes(size=4) self.Affinitized = v_uint8() self.Differential = v_uint8() self.DisableInterrupts = v_uint8() self._pad0024 = v_bytes(size=1) self.Context = v_uint32()
atlas0fd00m/CanCat
[ 169, 34, 169, 6, 1428517921 ]
def __init__(self): vstruct.VStruct.__init__(self) self.Version = v_uint16() self.Revision = v_uint16() self.Count = v_uint32() self.PartialDescriptors = vstruct.VArray([ CM_PARTIAL_RESOURCE_DESCRIPTOR() for i in xrange(1) ])
atlas0fd00m/CanCat
[ 169, 34, 169, 6, 1428517921 ]
def __init__(self): vstruct.VStruct.__init__(self) self.Lock = v_uint32() self.RangeBitmap = RTL_BITMAP()
atlas0fd00m/CanCat
[ 169, 34, 169, 6, 1428517921 ]
def __init__(self): vstruct.VStruct.__init__(self) self.RemappedFormat = ULARGE_INTEGER()
atlas0fd00m/CanCat
[ 169, 34, 169, 6, 1428517921 ]
def __init__(self): vstruct.VStruct.__init__(self) self.Size = v_uint16() self.Version = v_uint16() self.DeviceD1 = v_uint32() self.Address = v_uint32() self.UINumber = v_uint32() self.DeviceState = vstruct.VArray([ DEVICE_POWER_STATE() for i in xrange(7) ]) self.SystemWake = v_uint32() self.DeviceWake = v_uint32() self.D1Latency = v_uint32() self.D2Latency = v_uint32() self.D3Latency = v_uint32()
atlas0fd00m/CanCat
[ 169, 34, 169, 6, 1428517921 ]
def __init__(self): vstruct.VStruct.__init__(self) self.bits = _unnamed_12568()
atlas0fd00m/CanCat
[ 169, 34, 169, 6, 1428517921 ]
def __init__(self): vstruct.VStruct.__init__(self) self.IoResourceRequirementList = v_ptr32()
atlas0fd00m/CanCat
[ 169, 34, 169, 6, 1428517921 ]
def __init__(self): vstruct.VStruct.__init__(self) self.Header = DISPATCHER_HEADER() self.ProfileListHead = LIST_ENTRY() self.DirectoryTableBase = v_uint32() self.LdtDescriptor = KGDTENTRY() self.Int21Descriptor = KIDTENTRY() self.ThreadListHead = LIST_ENTRY() self.ProcessLock = v_uint32() self.Affinity = KAFFINITY_EX() self.ReadyListHead = LIST_ENTRY() self.SwapListEntry = SINGLE_LIST_ENTRY() self.ActiveProcessors = KAFFINITY_EX() self.AutoAlignment = v_uint32() self.BasePriority = v_uint8() self.QuantumReset = v_uint8() self.Visited = v_uint8() self.Flags = KEXECUTE_OPTIONS() self.ThreadSeed = vstruct.VArray([ v_uint32() for i in xrange(1) ]) self.IdealNode = vstruct.VArray([ v_uint16() for i in xrange(1) ]) self.IdealGlobalNode = v_uint16() self.Spare1 = v_uint16() self.IopmOffset = v_uint16() self.SchedulingGroup = v_ptr32() self.StackCount = KSTACK_COUNT() self.ProcessListEntry = LIST_ENTRY() self.CycleTime = v_uint64() self.ContextSwitches = v_uint64() self.FreezeCount = v_uint32() self.KernelTime = v_uint32() self.UserTime = v_uint32() self.VdmTrapcHandler = v_ptr32()
atlas0fd00m/CanCat
[ 169, 34, 169, 6, 1428517921 ]
def __init__(self): vstruct.VStruct.__init__(self) self.Start = v_uint32() self.Length = v_uint32() self.Reserved = v_uint32()
atlas0fd00m/CanCat
[ 169, 34, 169, 6, 1428517921 ]
def __init__(self): vstruct.VStruct.__init__(self)
atlas0fd00m/CanCat
[ 169, 34, 169, 6, 1428517921 ]
def __init__(self): vstruct.VStruct.__init__(self) self.Length = v_uint32()
atlas0fd00m/CanCat
[ 169, 34, 169, 6, 1428517921 ]
def __init__(self): vstruct.VStruct.__init__(self) self.Length = v_uint32() self.EaList = v_ptr32() self.EaListLength = v_uint32() self.EaIndex = v_uint32()
atlas0fd00m/CanCat
[ 169, 34, 169, 6, 1428517921 ]
def __init__(self): vstruct.VStruct.__init__(self) self.Allocs = v_uint32() self.Frees = v_uint32() self.Size = v_uint32() self.TagIndex = v_uint16() self.CreatorBackTraceIndex = v_uint16() self.TagName = vstruct.VArray([ v_uint16() for i in xrange(24) ])
atlas0fd00m/CanCat
[ 169, 34, 169, 6, 1428517921 ]
def __init__(self): vstruct.VStruct.__init__(self) self.SecurityContext = v_ptr32() self.Options = v_uint32() self.Reserved = v_uint16() self.ShareAccess = v_uint16() self.Parameters = v_ptr32()
atlas0fd00m/CanCat
[ 169, 34, 169, 6, 1428517921 ]
def __init__(self): vstruct.VStruct.__init__(self) self.Context = v_ptr32() self._pad0008 = v_bytes(size=4) self.Constraints = PROCESSOR_IDLE_CONSTRAINTS() self.DependencyCount = v_uint32() self.DependencyUsed = v_uint32() self.DependencyArray = v_ptr32() self.PlatformIdleStateIndex = v_uint32() self.ProcessorIdleStateIndex = v_uint32() self.IdleSelectFailureMask = v_uint32()
atlas0fd00m/CanCat
[ 169, 34, 169, 6, 1428517921 ]
def __init__(self): vstruct.VStruct.__init__(self) self.Primary = v_uint32()
atlas0fd00m/CanCat
[ 169, 34, 169, 6, 1428517921 ]
def __init__(self): vstruct.VStruct.__init__(self) self.Version = v_uint32() self.Pool = v_ptr32() self.CleanupGroup = v_ptr32() self.CleanupGroupCancelCallback = v_ptr32() self.RaceDll = v_ptr32() self.ActivationContext = v_ptr32() self.FinalizationCallback = v_ptr32() self.u = _unnamed_6606() self.CallbackPriority = v_uint32() self.Size = v_uint32()
atlas0fd00m/CanCat
[ 169, 34, 169, 6, 1428517921 ]
def __init__(self): vstruct.VStruct.__init__(self) self.Previous = v_ptr32() self.ActivationContext = v_ptr32() self.Flags = v_uint32()
atlas0fd00m/CanCat
[ 169, 34, 169, 6, 1428517921 ]
def __init__(self): vstruct.VStruct.__init__(self) self.Lock = EX_PUSH_LOCK() self.ViewListHead = LIST_ENTRY() self.PagedPoolQuotaCache = v_uint32()
atlas0fd00m/CanCat
[ 169, 34, 169, 6, 1428517921 ]
def __init__(self): vstruct.VStruct.__init__(self) self.HandleAttributes = v_uint32() self.GrantedAccess = v_uint32()
atlas0fd00m/CanCat
[ 169, 34, 169, 6, 1428517921 ]
def __init__(self): vstruct.VStruct.__init__(self) self.Link = LIST_ENTRY() self.Master = v_ptr32() self.Members = KAFFINITY_EX() self.ProcessorCount = v_uint32() self.Processors = v_ptr32() self.GetFFHThrottleState = v_ptr32() self.BoostPolicyHandler = v_ptr32() self.BoostModeHandler = v_ptr32() self.PerfSelectionHandler = v_ptr32() self.PerfControlHandler = v_ptr32() self.MaxFrequency = v_uint32() self.NominalFrequency = v_uint32() self.MaxPercent = v_uint32() self.MinPerfPercent = v_uint32() self.MinThrottlePercent = v_uint32() self.Coordination = v_uint8() self.HardPlatformCap = v_uint8() self.AffinitizeControl = v_uint8() self._pad004c = v_bytes(size=1) self.SelectedPercent = v_uint32() self.SelectedFrequency = v_uint32() self.DesiredPercent = v_uint32() self.MaxPolicyPercent = v_uint32() self.MinPolicyPercent = v_uint32() self.ConstrainedMaxPercent = v_uint32() self.ConstrainedMinPercent = v_uint32() self.GuaranteedPercent = v_uint32() self.TolerancePercent = v_uint32() self.SelectedState = v_uint64() self.Force = v_uint8() self._pad0080 = v_bytes(size=7) self.PerfChangeTime = v_uint64() self.PerfChangeIntervalCount = v_uint32() self._pad0090 = v_bytes(size=4)
atlas0fd00m/CanCat
[ 169, 34, 169, 6, 1428517921 ]
def __init__(self): vstruct.VStruct.__init__(self) self.EnabledFeatures = v_uint64() self.EnabledVolatileFeatures = v_uint64() self.Size = v_uint32() self.OptimizedSave = v_uint32() self.Features = vstruct.VArray([ XSTATE_FEATURE() for i in xrange(64) ])
atlas0fd00m/CanCat
[ 169, 34, 169, 6, 1428517921 ]
def __init__(self): vstruct.VStruct.__init__(self) self.ImpersonationData = v_uint32()
atlas0fd00m/CanCat
[ 169, 34, 169, 6, 1428517921 ]
def __init__(self): vstruct.VStruct.__init__(self) self.BalancedRoot = RTL_BALANCED_LINKS() self.OrderedPointer = v_ptr32() self.WhichOrderedElement = v_uint32() self.NumberGenericTableElements = v_uint32() self.DepthOfTree = v_uint32() self.RestartKey = v_ptr32() self.DeleteCount = v_uint32() self.CompareRoutine = v_ptr32() self.AllocateRoutine = v_ptr32() self.FreeRoutine = v_ptr32() self.TableContext = v_ptr32()
atlas0fd00m/CanCat
[ 169, 34, 169, 6, 1428517921 ]
def __init__(self): vstruct.VStruct.__init__(self) self.Affinitized = v_uint32() self.Performance = v_uint32() self.Total = v_uint32()
atlas0fd00m/CanCat
[ 169, 34, 169, 6, 1428517921 ]
def __init__(self): vstruct.VStruct.__init__(self) self.SizeOfFsFilterCallbacks = v_uint32() self.Reserved = v_uint32() self.PreAcquireForSectionSynchronization = v_ptr32() self.PostAcquireForSectionSynchronization = v_ptr32() self.PreReleaseForSectionSynchronization = v_ptr32() self.PostReleaseForSectionSynchronization = v_ptr32() self.PreAcquireForCcFlush = v_ptr32() self.PostAcquireForCcFlush = v_ptr32() self.PreReleaseForCcFlush = v_ptr32() self.PostReleaseForCcFlush = v_ptr32() self.PreAcquireForModifiedPageWriter = v_ptr32() self.PostAcquireForModifiedPageWriter = v_ptr32() self.PreReleaseForModifiedPageWriter = v_ptr32() self.PostReleaseForModifiedPageWriter = v_ptr32()
atlas0fd00m/CanCat
[ 169, 34, 169, 6, 1428517921 ]
def __init__(self): vstruct.VStruct.__init__(self) self.OwnerThread = v_uint32() self.IoPriorityBoosted = v_uint32()
atlas0fd00m/CanCat
[ 169, 34, 169, 6, 1428517921 ]
def __init__(self): vstruct.VStruct.__init__(self) self.Type = v_uint16() self.Size = v_uint16() self.DeviceObject = v_ptr32() self.PowerFlags = v_uint32() self.Dope = v_ptr32() self.ExtensionFlags = v_uint32() self.DeviceNode = v_ptr32() self.AttachedTo = v_ptr32() self.StartIoCount = v_uint32() self.StartIoKey = v_uint32() self.StartIoFlags = v_uint32() self.Vpb = v_ptr32() self.DependencyNode = v_ptr32() self.VerifierContext = v_ptr32()
atlas0fd00m/CanCat
[ 169, 34, 169, 6, 1428517921 ]
def __init__(self): vstruct.VStruct.__init__(self) self.StackBase = v_uint32() self.ActualLimit = v_uint32() self.PreviousTrapFrame = v_ptr32() self.PreviousExceptionList = v_ptr32() self.Previous = KERNEL_STACK_SEGMENT()
atlas0fd00m/CanCat
[ 169, 34, 169, 6, 1428517921 ]
def __init__(self): vstruct.VStruct.__init__(self) self.Start = LARGE_INTEGER() self.Length = v_uint32()
atlas0fd00m/CanCat
[ 169, 34, 169, 6, 1428517921 ]
def __init__(self): vstruct.VStruct.__init__(self) self.LocalData = v_ptr32() self.ActiveSubsegment = v_ptr32() self.CachedItems = vstruct.VArray([ v_ptr32() for i in xrange(16) ]) self.SListHeader = SLIST_HEADER() self.Counters = HEAP_BUCKET_COUNTERS() self.LastOpSequence = v_uint32() self.BucketIndex = v_uint16() self.LastUsed = v_uint16() self.NoThrashCount = v_uint16() self._pad0068 = v_bytes(size=6)
atlas0fd00m/CanCat
[ 169, 34, 169, 6, 1428517921 ]
def __init__(self): vstruct.VStruct.__init__(self) self.VolatileLowValue = v_uint32() self.HighValue = v_uint32()
atlas0fd00m/CanCat
[ 169, 34, 169, 6, 1428517921 ]
def __init__(self): vstruct.VStruct.__init__(self) self.TotalMemoryReserved = v_uint32() self.TotalMemoryCommitted = v_uint32() self.TotalMemoryLargeUCR = v_uint32() self.TotalSizeInVirtualBlocks = v_uint32() self.TotalSegments = v_uint32() self.TotalUCRs = v_uint32() self.CommittOps = v_uint32() self.DeCommitOps = v_uint32() self.LockAcquires = v_uint32() self.LockCollisions = v_uint32() self.CommitRate = v_uint32() self.DecommittRate = v_uint32() self.CommitFailures = v_uint32() self.InBlockCommitFailures = v_uint32() self.PollIntervalCounter = v_uint32() self.DecommitsSinceLastCheck = v_uint32() self.HeapPollInterval = v_uint32() self.AllocAndFreeOps = v_uint32() self.AllocationIndicesActive = v_uint32() self.InBlockDeccommits = v_uint32() self.InBlockDeccomitSize = v_uint32() self.HighWatermarkSize = v_uint32() self.LastPolledSize = v_uint32()
atlas0fd00m/CanCat
[ 169, 34, 169, 6, 1428517921 ]
def __init__(self): vstruct.VStruct.__init__(self) self.MailslotQuota = v_uint32() self.MaximumMessageSize = v_uint32() self.ReadTimeout = LARGE_INTEGER() self.TimeoutSpecified = v_uint8() self._pad0018 = v_bytes(size=7)
atlas0fd00m/CanCat
[ 169, 34, 169, 6, 1428517921 ]
def __init__(self): vstruct.VStruct.__init__(self) self.SizeOfFsFilterCallbackData = v_uint32() self.Operation = v_uint8() self.Reserved = v_uint8() self._pad0008 = v_bytes(size=2) self.DeviceObject = v_ptr32() self.FileObject = v_ptr32() self.Parameters = FS_FILTER_PARAMETERS()
atlas0fd00m/CanCat
[ 169, 34, 169, 6, 1428517921 ]
def __init__(self): vstruct.VStruct.__init__(self) self.DomainMembers = KAFFINITY_EX() self.Latency = v_uint32() self.BreakEvenDuration = v_uint32() self.Power = v_uint32() self.StateFlags = v_uint32() self.VetoAccounting = PPM_VETO_ACCOUNTING() self.StateType = v_uint8() self.InterruptsEnabled = v_uint8() self.Interruptible = v_uint8() self.ContextRetained = v_uint8() self.CacheCoherent = v_uint8() self.WakesSpuriously = v_uint8() self.PlatformOnly = v_uint8() self.NoCState = v_uint8()
atlas0fd00m/CanCat
[ 169, 34, 169, 6, 1428517921 ]
def __init__(self): vstruct.VStruct.__init__(self) self.Lock = v_uint32() self._pad0008 = v_bytes(size=4) self.CyclesLast = v_uint64() self.CyclesActive = v_uint64() self.Counters = vstruct.VArray([ v_ptr32() for i in xrange(2) ]) self.LastUpdateTime = v_uint64() self.UnscaledTime = v_uint64() self.UnaccountedTime = v_uint64() self.ScaledTime = vstruct.VArray([ v_uint64() for i in xrange(2) ]) self.UnaccountedKernelTime = v_uint64() self.PerformanceScaledKernelTime = v_uint64() self.UserTimeLast = v_uint32() self.KernelTimeLast = v_uint32() self.KernelTimesIndex = v_uint8() self._pad0068 = v_bytes(size=7)
atlas0fd00m/CanCat
[ 169, 34, 169, 6, 1428517921 ]
def __init__(self): vstruct.VStruct.__init__(self) self.OperationID = LUID() self.SecurityEvaluated = v_uint8() self.GenerateAudit = v_uint8() self.GenerateOnClose = v_uint8() self.PrivilegesAllocated = v_uint8() self.Flags = v_uint32() self.RemainingDesiredAccess = v_uint32() self.PreviouslyGrantedAccess = v_uint32() self.OriginalDesiredAccess = v_uint32() self.SubjectSecurityContext = SECURITY_SUBJECT_CONTEXT() self.SecurityDescriptor = v_ptr32() self.AuxData = v_ptr32() self.Privileges = _unnamed_8729() self.AuditPrivileges = v_uint8() self._pad0064 = v_bytes(size=3) self.ObjectName = UNICODE_STRING() self.ObjectTypeName = UNICODE_STRING()
atlas0fd00m/CanCat
[ 169, 34, 169, 6, 1428517921 ]
def __init__(self): vstruct.VStruct.__init__(self)
atlas0fd00m/CanCat
[ 169, 34, 169, 6, 1428517921 ]