code
stringlengths 4
4.48k
| docstring
stringlengths 1
6.45k
| _id
stringlengths 24
24
|
---|---|---|
class Generator(object): <NEW_LINE> <INDENT> @classmethod <NEW_LINE> def _gen_one(cls): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> @classmethod <NEW_LINE> def gen_many(cls, session, num): <NEW_LINE> <INDENT> pass | Some common methods for *Generator classes to inherit. | 6259907c67a9b606de5477bd |
class Project(db.Model): <NEW_LINE> <INDENT> __tablename__ = 'project' <NEW_LINE> id = db.Column(db.Integer, primary_key=True) <NEW_LINE> project_name = db.Column(db.String(64), unique=True, index=True) <NEW_LINE> project_address = db.Column(db.String(256), unique=True, index=True) <NEW_LINE> user_id = db.Column(db.Integer, db.ForeignKey('user.id')) <NEW_LINE> dataSource_id = db.Column(db.Integer, db.ForeignKey('data_source.id')) | 项目类 | 6259907ce1aae11d1e7cf528 |
class Viewport(object): <NEW_LINE> <INDENT> def __init__(self, width, height): <NEW_LINE> <INDENT> self.w = width <NEW_LINE> self.h = height <NEW_LINE> self.zoom = 0 <NEW_LINE> self.c = (width/2, height/2) <NEW_LINE> <DEDENT> def zoomIn(self): <NEW_LINE> <INDENT> self.zoom += 1 <NEW_LINE> <DEDENT> def zoomOut(self): <NEW_LINE> <INDENT> self.zoom -= 1 <NEW_LINE> <DEDENT> def width(self): <NEW_LINE> <INDENT> return int((self.w * (2**self.zoom)) + 0.5) <NEW_LINE> <DEDENT> def height(self): <NEW_LINE> <INDENT> return int((self.h * (2**self.zoom)) + 0.5) <NEW_LINE> <DEDENT> def center(self): <NEW_LINE> <INDENT> return self.c <NEW_LINE> <DEDENT> def set_pos(self, pos): <NEW_LINE> <INDENT> self.c = pos <NEW_LINE> <DEDENT> def set_zoom(self, newzoom): <NEW_LINE> <INDENT> self.zoom = newzoom <NEW_LINE> <DEDENT> def size(self): <NEW_LINE> <INDENT> return (self.width(), self.height()) <NEW_LINE> <DEDENT> def top(self): <NEW_LINE> <INDENT> return self.c[1] - (self.height()/2) <NEW_LINE> <DEDENT> def left(self): <NEW_LINE> <INDENT> return self.c[0] - (self.width()/2) <NEW_LINE> <DEDENT> def bottom(self): <NEW_LINE> <INDENT> return self.top() + self.height() <NEW_LINE> <DEDENT> def right(self): <NEW_LINE> <INDENT> return self.left() + self.width() <NEW_LINE> <DEDENT> def topleft(self): <NEW_LINE> <INDENT> return (self.top(), self.left()) <NEW_LINE> <DEDENT> def bottomright(self): <NEW_LINE> <INDENT> return (self.bottom(), self.right()) <NEW_LINE> <DEDENT> def rect(self): <NEW_LINE> <INDENT> return pygame.Rect(self.left(), self.top(), self.width(), self.height()) <NEW_LINE> <DEDENT> def scrollUp(self, py, rect): <NEW_LINE> <INDENT> y = self.top() - py <NEW_LINE> if (y < rect.top): <NEW_LINE> <INDENT> y = rect.top <NEW_LINE> <DEDENT> y += (self.height()/2) <NEW_LINE> self.c = (self.c[0], y) <NEW_LINE> <DEDENT> def scrollDown(self, py, rect): <NEW_LINE> <INDENT> y = self.bottom() + py <NEW_LINE> if (y > rect.bottom): <NEW_LINE> <INDENT> y = rect.bottom <NEW_LINE> <DEDENT> y -= 
(self.height()/2) <NEW_LINE> self.c = (self.c[0], y) <NEW_LINE> <DEDENT> def scrollLeft(self, px, rect): <NEW_LINE> <INDENT> x = self.left() - px <NEW_LINE> if (x < rect.left): <NEW_LINE> <INDENT> x = rect.left <NEW_LINE> <DEDENT> x += (self.width()/2) <NEW_LINE> self.c = (x, self.c[1]) <NEW_LINE> <DEDENT> def scrollRight(self, px, rect): <NEW_LINE> <INDENT> x = self.right() + px <NEW_LINE> if (x > rect.right): <NEW_LINE> <INDENT> x = rect.right <NEW_LINE> <DEDENT> x -= (self.width()/2) <NEW_LINE> self.c = (x, self.c[1]) <NEW_LINE> <DEDENT> def __str__(self): <NEW_LINE> <INDENT> return pygame.Rect.__str__(self.rect()) <NEW_LINE> <DEDENT> def point_to_grid_pos(self, pos): <NEW_LINE> <INDENT> return (int((pos[0] * 2**self.zoom)) + self.left(), int((pos[1] * 2**self.zoom) + self.top())) | Create a viewport.
| 6259907c091ae3566870666d |
@requires(ProcessTrainData) <NEW_LINE> class GBTLearn(luigi.Task): <NEW_LINE> <INDENT> n_evals = luigi.IntParameter(default=75, description="XGB rounds") <NEW_LINE> desired_sample_size = luigi.IntParameter(default=30000, description="Sample size for XGB watchlist") <NEW_LINE> features = luigi.ListParameter(default=DEFAULT_FEATURES, description="Features to use for training") <NEW_LINE> def run(self): <NEW_LINE> <INDENT> data_train = pd.read_hdf(self.input().path) <NEW_LINE> data_lagged_features = pd.read_hdf("./data/processed/client_product_semana_features.h5") <NEW_LINE> data_train = data_train.merge(data_lagged_features, how='left', on=["Cliente_ID", "Producto_ID", "Semana"]) <NEW_LINE> max_sample_size = min(max(int(data_train.size * 0.005), 1), self.desired_sample_size) <NEW_LINE> watchlist_sampled = data_train.sample(n=max_sample_size, random_state=1) <NEW_LINE> non_watchlist_sample = data_train.drop(watchlist_sampled.index) <NEW_LINE> watchlist = xgb.DMatrix(watchlist_sampled[self.features], label=watchlist_sampled['target'], missing=np.nan) <NEW_LINE> evals = [(watchlist, 'eval')] <NEW_LINE> dtrain = xgb.DMatrix(non_watchlist_sample[self.features], label=non_watchlist_sample['target'], missing=np.nan) <NEW_LINE> xgb_fit = xgb.train(XGB_PARAMS, dtrain, self.n_evals, evals=evals, verbose_eval=1, early_stopping_rounds=10, maximize=False) <NEW_LINE> xgb_fit.save_model(self.output().path) <NEW_LINE> <DEDENT> def output(self): <NEW_LINE> <INDENT> return luigi.LocalTarget("./data/models/xgb.model") | Train model on the training dataset and save the model afterwards. | 6259907c23849d37ff852ae7 |
class dstat_plugin(dstat): <NEW_LINE> <INDENT> def __init__(self): <NEW_LINE> <INDENT> self.name = 'most expensive' <NEW_LINE> self.vars = ('memory process',) <NEW_LINE> self.type = 's' <NEW_LINE> self.width = 17 <NEW_LINE> self.scale = 0 <NEW_LINE> <DEDENT> def extract(self): <NEW_LINE> <INDENT> self.val['max'] = 0.0 <NEW_LINE> for pid in proc_pidlist(): <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> l = proc_splitline('/proc/%s/stat' % pid) <NEW_LINE> <DEDENT> except IOError: <NEW_LINE> <INDENT> continue <NEW_LINE> <DEDENT> if len(l) < 23: continue <NEW_LINE> usage = int(l[23]) * pagesize <NEW_LINE> if usage <= self.val['max']: continue <NEW_LINE> self.val['max'] = usage <NEW_LINE> self.val['name'] = getnamebypid(pid, l[1][1:-1]) <NEW_LINE> self.val['pid'] = pid <NEW_LINE> <DEDENT> self.val['memory process'] = '%-*s%s' % (self.width-5, self.val['name'][0:self.width-5], cprint(self.val['max'], 'f', 5, 1024)) <NEW_LINE> <DEDENT> def showcsv(self): <NEW_LINE> <INDENT> return '%s / %d%%' % (self.val['name'], self.val['max']) | Most expensive CPU process.
Displays the process that uses the CPU the most during the monitored
interval. The value displayed is the percentage of CPU time for the total
amount of CPU processing power. Based on per process CPU information. | 6259907c5fdd1c0f98e5f9ae |
@unittest.skipUnless(luchador.get_nn_backend() == 'theano', 'Theano backend') <NEW_LINE> class TestGetVariable(_ScopeTestCase): <NEW_LINE> <INDENT> def test_get_variable_reuse_variable(self): <NEW_LINE> <INDENT> scope = self.get_scope() <NEW_LINE> var1 = nn.make_variable(scope, shape=[3, 1]) <NEW_LINE> be._set_flag(True) <NEW_LINE> var2 = nn.get_variable(scope) <NEW_LINE> self.assertIs( var1.unwrap(), var2.unwrap(), 'Reused variable should be identical to the original variable' ) <NEW_LINE> <DEDENT> def test_get_variable_raises_when_reuseing_non_existent_variable(self): <NEW_LINE> <INDENT> be._set_flag(True) <NEW_LINE> try: <NEW_LINE> <INDENT> nn.get_variable('non_existing_variable_name') <NEW_LINE> <DEDENT> except ValueError: <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> self.fail( 'get_variable should raise when ' 'trying to reuse non existent variable.' ) <NEW_LINE> <DEDENT> <DEDENT> def test_make_variable_raises_when_creating_existing_variable(self): <NEW_LINE> <INDENT> scope = self.get_scope() <NEW_LINE> nn.make_variable(scope, shape=[3, 1]) <NEW_LINE> try: <NEW_LINE> <INDENT> nn.make_variable(scope, shape=[3, 1]) <NEW_LINE> <DEDENT> except ValueError: <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> self.fail( 'make_variable should raise when ' 'trying to create variable already exists.' ) | Test if get_variable correctly retrieve/create Variable | 6259907c44b2445a339b7675 |
class displayTradersData(BrowserView): <NEW_LINE> <INDENT> pass | display all informations about traders, including private data
like email adress or phone | 6259907c9c8ee82313040e9f |
class PipeConnection(Connection): <NEW_LINE> <INDENT> def connect(self, pipe_socket): <NEW_LINE> <INDENT> self._conn = pipe_socket <NEW_LINE> self._conn.connect() <NEW_LINE> return self | Connection type for pipes. | 6259907c3617ad0b5ee07b7d |
class MreIdlm(MelRecord): <NEW_LINE> <INDENT> rec_sig = b'IDLM' <NEW_LINE> _flags = Flags.from_names('runInSequence', None, 'doOnce') <NEW_LINE> melSet = MelSet( MelEdid(), MelBounds(), MelUInt8Flags(b'IDLF', u'flags', _flags), MelPartialCounter(MelTruncatedStruct( b'IDLC', [u'B', u'3s'], 'animation_count', 'unused', old_versions={'B'}), counter='animation_count', counts='animations'), MelFloat(b'IDLT', 'idleTimerSetting'), MelFidList(b'IDLA','animations'), ) <NEW_LINE> __slots__ = melSet.getSlotsUsed() | Idle Marker. | 6259907c60cbc95b06365a85 |
class RoleModeViewSet(viewsets.ModelViewSet): <NEW_LINE> <INDENT> serializer_class = RoleSerializer <NEW_LINE> queryset = Role.objects.all() | 查看和编辑角色实例的视图集。 | 6259907c7b180e01f3e49d7d |
class LeaseClientManager(base.BaseClientManager): <NEW_LINE> <INDENT> def create(self, name, start, end, reservations, events): <NEW_LINE> <INDENT> values = {'name': name, 'start_date': start, 'end_date': end, 'reservations': reservations, 'events': events} <NEW_LINE> return self._create('/leases', values, 'lease') <NEW_LINE> <DEDENT> def get(self, lease_id): <NEW_LINE> <INDENT> return self._get('/leases/%s' % lease_id, 'lease') <NEW_LINE> <DEDENT> def update(self, lease_id, name=None, prolong_for=None): <NEW_LINE> <INDENT> values = {} <NEW_LINE> if name: <NEW_LINE> <INDENT> values['name'] = name <NEW_LINE> <DEDENT> if prolong_for: <NEW_LINE> <INDENT> if prolong_for.endswith('s'): <NEW_LINE> <INDENT> coefficient = 1 <NEW_LINE> <DEDENT> elif prolong_for.endswith('m'): <NEW_LINE> <INDENT> coefficient = 60 <NEW_LINE> <DEDENT> elif prolong_for.endswith('h'): <NEW_LINE> <INDENT> coefficient = 60 * 60 <NEW_LINE> <DEDENT> elif prolong_for.endswith('d'): <NEW_LINE> <INDENT> coefficient = 24 * 60 * 60 <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> raise exception.ClimateClientException("Unsupportable date " "format for lease " "prolonging.") <NEW_LINE> <DEDENT> values['prolong_for'] = int(prolong_for[:-1]) * coefficient <NEW_LINE> <DEDENT> if not values: <NEW_LINE> <INDENT> return 'No values to update passed.' <NEW_LINE> <DEDENT> return self._update('/leases/%s' % lease_id, values, response_key='lease') <NEW_LINE> <DEDENT> def delete(self, lease_id): <NEW_LINE> <INDENT> self._delete('/leases/%s' % lease_id) <NEW_LINE> <DEDENT> def list(self): <NEW_LINE> <INDENT> return self._get('/leases', 'leases') | Manager for the lease connected requests. | 6259907c76e4537e8c3f0faf |
class rowtransposition(): <NEW_LINE> <INDENT> def __init__(self, key, text, to_encrypt): <NEW_LINE> <INDENT> self.key = str(key) <NEW_LINE> self.text = list(text) <NEW_LINE> self.textLength = len(self.text) <NEW_LINE> self.keyLength = len(self.key) <NEW_LINE> self.rowNum = math.ceil(self.textLength / self.keyLength) <NEW_LINE> self.table = [] <NEW_LINE> self.to_encrypt = to_encrypt <NEW_LINE> self.set_table() <NEW_LINE> self.newKey = self.generatenewKey() <NEW_LINE> if self.to_encrypt: <NEW_LINE> <INDENT> self.encrypt() <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> self.decrypt() <NEW_LINE> <DEDENT> <DEDENT> def set_table(self): <NEW_LINE> <INDENT> for row in range(self.rowNum): <NEW_LINE> <INDENT> self.table.append([]) <NEW_LINE> for column in range(self.keyLength): <NEW_LINE> <INDENT> self.table[row].append(' ') <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> def generatenewKey(self): <NEW_LINE> <INDENT> newKey = [] <NEW_LINE> for i in range(self.keyLength): <NEW_LINE> <INDENT> newKey.append(int(self.key[i])) <NEW_LINE> <DEDENT> return newKey <NEW_LINE> <DEDENT> def encrypt(self): <NEW_LINE> <INDENT> row = 0 <NEW_LINE> count = 0 <NEW_LINE> spaces = 0 <NEW_LINE> alphabet = list("abcdefghijklmnopqrstuvwxyz") <NEW_LINE> ciphertext = "" <NEW_LINE> for row in range(self.rowNum): <NEW_LINE> <INDENT> for column in range(self.keyLength): <NEW_LINE> <INDENT> if count >= self.textLength: <NEW_LINE> <INDENT> self.table[row][column] = " " <NEW_LINE> spaces += 1 <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> self.table[row][column] = self.text[count] <NEW_LINE> count += 1 <NEW_LINE> <DEDENT> <DEDENT> if spaces > 0: <NEW_LINE> <INDENT> for row in range(self.rowNum): <NEW_LINE> <INDENT> for column in range(self.keyLength): <NEW_LINE> <INDENT> if self.table[row][column] == ' ': <NEW_LINE> <INDENT> self.table[row][column] = alphabet[len(alphabet) - spaces] <NEW_LINE> spaces -= 1 <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> <DEDENT> <DEDENT> print("We are going to encrypt the plaintext " + 
''.join(self.text), end=' ' + "with key " + str(self.key)) <NEW_LINE> print("\n") <NEW_LINE> for i in range(len(self.newKey)): <NEW_LINE> <INDENT> col = self.newKey[i] - 1 <NEW_LINE> for row in range(self.rowNum): <NEW_LINE> <INDENT> ciphertext += self.table[row][col] <NEW_LINE> <DEDENT> <DEDENT> print("\nThe ciphertext is", ciphertext) <NEW_LINE> <DEDENT> def decrypt(self): <NEW_LINE> <INDENT> row = 0 <NEW_LINE> count = 0 <NEW_LINE> plaintext = "" <NEW_LINE> for i in range(len(self.newKey)): <NEW_LINE> <INDENT> column = self.newKey[i] - 1 <NEW_LINE> for row in range(self.rowNum): <NEW_LINE> <INDENT> self.table[row][column] = self.text[count] <NEW_LINE> count += 1 <NEW_LINE> <DEDENT> <DEDENT> print("We are going to decrypt the ciphertext " + ''.join(self.text), end=' ' + "with key " + str(self.key)) <NEW_LINE> print("\n") <NEW_LINE> for row in range(self.rowNum): <NEW_LINE> <INDENT> for column in range(self.keyLength): <NEW_LINE> <INDENT> plaintext += self.table[row][column] <NEW_LINE> <DEDENT> <DEDENT> print("\nThe plaintext is", plaintext) | Row Transposition Cipher | 6259907c23849d37ff852ae9 |
class DiscoverProcessTrees: <NEW_LINE> <INDENT> def __init__(self, settings): <NEW_LINE> <INDENT> self.settings = settings <NEW_LINE> self.cmd = "/Applications/ProM-6.7.app/Contents/Resources/ProM67cli.sh -f /Applications/ProM-6.7.app/Contents/Resources/xes-inductive-miner.sh" <NEW_LINE> <DEDENT> def mineTree(self, names): <NEW_LINE> <INDENT> folder = self.settings['folder_name'][:-1] <NEW_LINE> def run(): <NEW_LINE> <INDENT> while len(glob.glob('current_file/*')) >= 1: <NEW_LINE> <INDENT> time.sleep(5) <NEW_LINE> print('Had to wait because another process is using PROM') <NEW_LINE> <DEDENT> open('current_file/'+folder, 'a').close() <NEW_LINE> proc = subprocess.Popen(self.cmd.split(" "), stdout=open("/dev/null", "w"), cwd="/Applications/ProM-6.7.app/Contents/Resources/", stderr=open("/dev/null", "w")) <NEW_LINE> timer = Timer(40, proc.kill) <NEW_LINE> try: <NEW_LINE> <INDENT> timer.start() <NEW_LINE> proc.communicate() <NEW_LINE> <DEDENT> finally: <NEW_LINE> <INDENT> timer.cancel() <NEW_LINE> if os.path.exists('current_file/'+folder): <NEW_LINE> <INDENT> os.remove('current_file/'+folder) <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> def check_if_all_files(names): <NEW_LINE> <INDENT> for name in names: <NEW_LINE> <INDENT> if not os.path.exists('xes_temp_output/'+folder+'/'+name): <NEW_LINE> <INDENT> return False <NEW_LINE> <DEDENT> <DEDENT> return True <NEW_LINE> <DEDENT> i = 0 <NEW_LINE> while check_if_all_files(names) == False: <NEW_LINE> <INDENT> i+=1 <NEW_LINE> print ('running Prom...(iteration',i,')') <NEW_LINE> run() <NEW_LINE> <DEDENT> print('end Prom...') | Call the inductive miner available in Prom using the cli
see readme file from github | 6259907c3317a56b869bf25e |
class SplitsComponentStateRefMut(SplitsComponentStateRef): <NEW_LINE> <INDENT> def __init__(self, ptr): <NEW_LINE> <INDENT> self.ptr = ptr | The state object that describes a single segment's information to visualize.
| 6259907c4f88993c371f123a |
class OldEvent(models.Model): <NEW_LINE> <INDENT> type = models.ForeignKey(OldEventType) <NEW_LINE> shortDescription = models.CharField(max_length=255, verbose_name="Short Description", help_text="This text is displayed on the events index.") <NEW_LINE> location = models.ForeignKey(OldLocation) <NEW_LINE> longDescription = models.TextField(verbose_name="Long Description", help_text="This text is displayed on the details page for this event.") <NEW_LINE> start = models.DateTimeField(default=datetime.now) <NEW_LINE> finish = models.DateTimeField(default=lambda: datetime.now() + timedelta(hours=1)) <NEW_LINE> displayFrom = models.DateTimeField(default=datetime.now, verbose_name="Display From", help_text="This controls when the event will be visible in the index and feeds.") <NEW_LINE> cancelled = models.BooleanField() <NEW_LINE> class Meta: <NEW_LINE> <INDENT> db_table = 'events_event' | Represents a single event | 6259907c26068e7796d4e370 |
class DiffuserWifiSensor(DiffuserEntity, SensorEntity): <NEW_LINE> <INDENT> _attr_device_class = SensorDeviceClass.SIGNAL_STRENGTH <NEW_LINE> _attr_native_unit_of_measurement = PERCENTAGE <NEW_LINE> _attr_entity_category = EntityCategory.DIAGNOSTIC <NEW_LINE> def __init__( self, diffuser: Diffuser, coordinator: RitualsDataUpdateCoordinator ) -> None: <NEW_LINE> <INDENT> super().__init__(diffuser, coordinator, WIFI_SUFFIX) <NEW_LINE> <DEDENT> @property <NEW_LINE> def native_value(self) -> int: <NEW_LINE> <INDENT> return self._diffuser.wifi_percentage | Representation of a diffuser wifi sensor. | 6259907c167d2b6e312b82ac |
class G0W0Work(Work): <NEW_LINE> <INDENT> def __init__(self, scf_input, nscf_input, scr_input, sigma_inputs, workdir=None, manager=None, spread_scr=False, nksmall=None): <NEW_LINE> <INDENT> super(G0W0Work, self).__init__(workdir=workdir, manager=manager) <NEW_LINE> if isinstance(scf_input, (list, tuple)): <NEW_LINE> <INDENT> for single_scf_input in scf_input: <NEW_LINE> <INDENT> self.scf_task = self.register_scf_task(single_scf_input) <NEW_LINE> <DEDENT> <DEDENT> else: <NEW_LINE> <INDENT> self.scf_task = self.register_scf_task(scf_input) <NEW_LINE> <DEDENT> self.nscf_task = nscf_task = self.register_nscf_task(nscf_input, deps={self.scf_task: "DEN"}) <NEW_LINE> if not spread_scr: <NEW_LINE> <INDENT> self.scr_task = scr_task = self.register_scr_task(scr_input, deps={nscf_task: "WFK"}) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> self.scr_tasks = [] <NEW_LINE> <DEDENT> nogw = False <NEW_LINE> if nksmall: <NEW_LINE> <INDENT> raise NotImplementedError("with nksmall but strategies have been removed") <NEW_LINE> <DEDENT> if not nogw: <NEW_LINE> <INDENT> if not isinstance(sigma_inputs, (list, tuple)): <NEW_LINE> <INDENT> sigma_inputs = [sigma_inputs] <NEW_LINE> <DEDENT> self.sigma_tasks = [] <NEW_LINE> for sigma_input in sigma_inputs: <NEW_LINE> <INDENT> if spread_scr: <NEW_LINE> <INDENT> new_scr_input = copy.deepcopy(scr_input) <NEW_LINE> new_scr_input.screening.ecuteps = sigma_input.sigma.ecuteps <NEW_LINE> new_scr_input.screening.nband = sigma_input.sigma.nband <NEW_LINE> new_scr_input.electrons.nband = sigma_input.sigma.nband <NEW_LINE> scr_task = self.register_scr_task(new_scr_input, deps={nscf_task: "WFK"}) <NEW_LINE> <DEDENT> task = self.register_sigma_task(sigma_input, deps={nscf_task: "WFK", scr_task: "SCR"}) <NEW_LINE> self.sigma_tasks.append(task) | Work for G0W0 calculations. | 6259907c7047854f46340de6 |
class Heap(object): <NEW_LINE> <INDENT> def __init__(self, pc): <NEW_LINE> <INDENT> self.__array__ = list() <NEW_LINE> self.__array__.append(0) <NEW_LINE> self.__pc__ = pc <NEW_LINE> self.__size__ = 0 <NEW_LINE> <DEDENT> def isEmpty(self): <NEW_LINE> <INDENT> if self.__size__ == 0: <NEW_LINE> <INDENT> return True <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> return False <NEW_LINE> <DEDENT> <DEDENT> def getParentIDX(self, idx): <NEW_LINE> <INDENT> return idx >> 1 <NEW_LINE> <DEDENT> def getLChildIDX(self, idx): <NEW_LINE> <INDENT> return idx << 1 <NEW_LINE> <DEDENT> def getRChildIDX(self, idx): <NEW_LINE> <INDENT> return (idx << 1) + 1 <NEW_LINE> <DEDENT> def getHiPriChildIDX(self, idx): <NEW_LINE> <INDENT> lChildIDX = self.getLChildIDX(idx) <NEW_LINE> if lChildIDX > self.__size__: <NEW_LINE> <INDENT> return 0 <NEW_LINE> <DEDENT> elif lChildIDX == self.__size__: <NEW_LINE> <INDENT> return lChildIDX <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> rChildIDX = self.getRChildIDX(idx) <NEW_LINE> if self.__pc__(self.__array__[lChildIDX], self.__array__[rChildIDX]) < 0: <NEW_LINE> <INDENT> return rChildIDX <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> return lChildIDX <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> def heapPush(self, data): <NEW_LINE> <INDENT> idx = self.__size__ + 1 <NEW_LINE> self.__array__.append(0) <NEW_LINE> while idx != 1: <NEW_LINE> <INDENT> if self.__pc__(data, self.__array__[self.getParentIDX(idx)]) > 0: <NEW_LINE> <INDENT> self.__array__[idx] = self.__array__[self.getParentIDX(idx)] <NEW_LINE> idx = self.getParentIDX(idx) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> break <NEW_LINE> <DEDENT> <DEDENT> self.__array__[idx] = data <NEW_LINE> self.__size__ += 1 <NEW_LINE> <DEDENT> def heapPop(self): <NEW_LINE> <INDENT> if self.__size__ < 1: <NEW_LINE> <INDENT> raise EmptyHeapError <NEW_LINE> <DEDENT> ret = self.__array__[1] <NEW_LINE> lastData = self.__array__[self.__size__] <NEW_LINE> parentIdx = 1 <NEW_LINE> childIdx = self.getHiPriChildIDX(parentIdx) 
<NEW_LINE> while childIdx != 0: <NEW_LINE> <INDENT> if self.__pc__(lastData, self.__array__[childIdx]) >= 0: <NEW_LINE> <INDENT> break <NEW_LINE> <DEDENT> self.__array__[parentIdx] = self.__array__[childIdx] <NEW_LINE> parentIdx = childIdx <NEW_LINE> childIdx = self.getHiPriChildIDX(parentIdx) <NEW_LINE> <DEDENT> self.__array__[parentIdx] = lastData <NEW_LINE> self.__size__ -= 1 <NEW_LINE> self.__array__.pop() <NEW_LINE> return ret | Heap Class.
This heap class is used for Priority Queue.
And this class is consist of array. | 6259907c4f6381625f19a1c6 |
class NpmCommand(Command): <NEW_LINE> <INDENT> description = 'run npm install command' <NEW_LINE> user_options = [ ('executable=', 'e', 'executable path'), ('instance-dir=', 'i', 'instance dir of the project'), ] <NEW_LINE> def initialize_options(self): <NEW_LINE> <INDENT> self.executable = 'npm' <NEW_LINE> self.instance_dir = None <NEW_LINE> <DEDENT> def finalize_options(self): <NEW_LINE> <INDENT> command = find_executable(self.executable) <NEW_LINE> if not command: <NEW_LINE> <INDENT> raise DistutilsArgError( "{0} not found. You must specify --executable or -e" " with the npm instance_dir".format(self.executable) ) <NEW_LINE> <DEDENT> if self.instance_dir is None or not os.path.isdir(self.instance_dir): <NEW_LINE> <INDENT> raise DistutilsArgError( "project dir {0} not found." " You must specify --instance_dir or -p" " with the project instance_dir".format(self.instance_dir) ) <NEW_LINE> <DEDENT> <DEDENT> def run(self): <NEW_LINE> <INDENT> command = '{0} install --prefix {1} {1}'.format( self.executable, self.instance_dir, ) <NEW_LINE> self.announce( 'Running command: {0}'.format(command), level=INFO) <NEW_LINE> self.spawn(command.split(' ')) | Run npm install command | 6259907c8a349b6b43687c8d |
class ProofOfAgeCodeSerializer(serializers.ModelSerializer): <NEW_LINE> <INDENT> class Meta: <NEW_LINE> <INDENT> model = ProofOfAgeCode <NEW_LINE> fields = ( 'code', 'description' ) <NEW_LINE> <DEDENT> def to_internal_value(self, data): <NEW_LINE> <INDENT> if 'code' in data and data['code'] is not None: <NEW_LINE> <INDENT> return ProofOfAgeCode.objects.get(code=data['code']) <NEW_LINE> <DEDENT> return super().to_internal_value(data) | Serializes ProofOfAgeCode fields. | 6259907c3d592f4c4edbc877 |
class TestDataverseGenericTravisNot(object): <NEW_LINE> <INDENT> def test_dataverse_from_json_to_json_valid(self): <NEW_LINE> <INDENT> data = [ ({json_upload_min()}, {}), ({json_upload_full()}, {}), ({json_upload_min()}, {"data_format": "dataverse_upload"}), ({json_upload_min()}, {"validate": False}), ({json_upload_min()}, {"filename_schema": "", "validate": False},), ({json_upload_min()}, {"filename_schema": "wrong", "validate": False},), ( {json_upload_min()}, { "filename_schema": test_config[ "dataverse_upload_schema_filename" ], "validate": True, }, ), ({"{}"}, {"validate": False}), ] <NEW_LINE> for args_from, kwargs_from in data: <NEW_LINE> <INDENT> pdv_start = data_object() <NEW_LINE> args = args_from <NEW_LINE> kwargs = kwargs_from <NEW_LINE> pdv_start.from_json(*args, **kwargs) <NEW_LINE> if "validate" in kwargs: <NEW_LINE> <INDENT> if not kwargs["validate"]: <NEW_LINE> <INDENT> kwargs = {"validate": False} <NEW_LINE> <DEDENT> <DEDENT> data_out = json.loads(pdv_start.json(**kwargs)) <NEW_LINE> write_json(test_config["dataverse_json_output_filename"], data_out) <NEW_LINE> data_in = read_file(test_config["dataverse_json_output_filename"]) <NEW_LINE> pdv_end = data_object() <NEW_LINE> kwargs = kwargs_from <NEW_LINE> pdv_end.from_json(data_in, **kwargs) <NEW_LINE> for key, val in pdv_end.get().items(): <NEW_LINE> <INDENT> assert getattr(pdv_start, key) == getattr(pdv_end, key) <NEW_LINE> <DEDENT> assert len(pdv_start.__dict__) == len(pdv_end.__dict__,) | Generic tests for Dataverse(), not running on Travis (no file-write permissions). | 6259907c3617ad0b5ee07b7f |
class ViEnG(bpy.types.NodeSocket): <NEW_LINE> <INDENT> bl_idname = 'ViEnG' <NEW_LINE> bl_label = 'EnVi Geometry' <NEW_LINE> valid = ['EnVi Geometry'] <NEW_LINE> link_limit = 1 <NEW_LINE> def draw(self, context, layout, node, text): <NEW_LINE> <INDENT> layout.label(text) <NEW_LINE> <DEDENT> def draw_color(self, context, node): <NEW_LINE> <INDENT> return (0.0, 0.0, 1.0, 0.75) | Energy geometry out socket | 6259907c3346ee7daa33837a |
class Student(models.Model): <NEW_LINE> <INDENT> customer = models.ForeignKey("CustomerInfo") <NEW_LINE> class_grades = models.ManyToManyField("ClassList") <NEW_LINE> def __str__(self): <NEW_LINE> <INDENT> return "%s" % self.customer | 学员表 | 6259907c92d797404e389875 |
class LogOp(Op): <NEW_LINE> <INDENT> def __call__(self, node): <NEW_LINE> <INDENT> new_node = Op.__call__(self) <NEW_LINE> new_node.inputs = [node] <NEW_LINE> new_node.name = 'Log({})'.format(node.name) <NEW_LINE> return new_node <NEW_LINE> <DEDENT> def compute(self, node, input_vals): <NEW_LINE> <INDENT> assert(len(input_vals) == 1) <NEW_LINE> return np.log(input_vals[0]) <NEW_LINE> <DEDENT> def gradient(self, node, output_grad): <NEW_LINE> <INDENT> return [output_grad / node.inputs[0]] | Op that performs log function(base e) | 6259907c76e4537e8c3f0fb1 |
class ModelLanguageEdamId(str, enum.Enum): <NEW_LINE> <INDENT> BNGL = 'format_3972' <NEW_LINE> CellML = 'format_3240' <NEW_LINE> CopasiML = 'format_9003' <NEW_LINE> GENESIS = 'format_9056' <NEW_LINE> GINML = 'format_9009' <NEW_LINE> HOC = 'format_9005' <NEW_LINE> Kappa = 'format_9006' <NEW_LINE> LEMS = 'format_9004' <NEW_LINE> MASS = 'format_9011' <NEW_LINE> MorpheusML = 'format_9002' <NEW_LINE> NCS = 'format_9057' <NEW_LINE> NeuroML = 'format_3971' <NEW_LINE> NMODL = 'format_9052' <NEW_LINE> pharmML = 'format_9007' <NEW_LINE> RBA = 'format_9012' <NEW_LINE> SBML = 'format_2585' <NEW_LINE> SLI = 'format_9054' <NEW_LINE> Smoldyn = 'format_9001' <NEW_LINE> VCML = 'format_9000' <NEW_LINE> XPP = 'format_9010' <NEW_LINE> ZGINML = 'format_9008' | Model language EDAM id | 6259907cf548e778e596cfc4 |
class NoFilterDiffOpcodeGenerator(DiffOpcodeGenerator): <NEW_LINE> <INDENT> def _apply_processors(self, opcodes): <NEW_LINE> <INDENT> for opcode in opcodes: <NEW_LINE> <INDENT> yield opcode | A DiffOpcodeGenerator which does not filter interdiffs | 6259907c26068e7796d4e372 |
class TestFunctionalCapabilityListResult(unittest.TestCase): <NEW_LINE> <INDENT> def setUp(self): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def tearDown(self): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def testFunctionalCapabilityListResult(self): <NEW_LINE> <INDENT> pass | FunctionalCapabilityListResult unit test stubs | 6259907ca05bb46b3848be42 |
class IconBlurb(Orderable): <NEW_LINE> <INDENT> homepage = models.ForeignKey(HomePage, related_name="blurbs") <NEW_LINE> icon = models.CharField(max_length=100, help_text="A font awesome icon name. i.e. icon-compass. More here: " "http://fortawesome.github.io/Font-Awesome/icons/") <NEW_LINE> title = models.CharField(max_length=200) <NEW_LINE> content = models.TextField() <NEW_LINE> link = models.CharField(max_length=2000, blank=True, help_text="Optional, if provided clicking the blurb will go here.") | An icon box on a HomePage | 6259907c167d2b6e312b82ad |
class ImageUpload(models.Model): <NEW_LINE> <INDENT> img = models.ImageField(upload_to="images", null=True, blank=True, max_length=255) | Model used to upload images to server, and have server generate thumbnails
Upload path defaults to "images" folder but will change during POST; use specified "deployment" to get Campaign and Deployment names
e.g. deployment id = 2, look up Deployment short_name = "r20110612_033752_st_helens_01_elephant_rock_deep_repeat"
and respective Campaign short_name = "Campaign1"
Upload image goes into: UPLOAD_PATH/r20110612_033752_st_helens_01_elephant_rock_deep_repeat/Campaign1/images/
Generated thumbnail goes into: UPLOAD_PATH/r20110612_033752_st_helens_01_elephant_rock_deep_repeat/Campaign1/thumbnails/
UPLOAD_PATH defined in settings.py | 6259907c7cff6e4e811b7473 |
class Superlu(Package): <NEW_LINE> <INDENT> homepage = "http://crd-legacy.lbl.gov/~xiaoye/SuperLU/#superlu" <NEW_LINE> url = "http://crd-legacy.lbl.gov/~xiaoye/SuperLU/superlu_5.2.1.tar.gz" <NEW_LINE> version('5.2.1', '3a1a9bff20cb06b7d97c46d337504447') <NEW_LINE> version('4.3', 'b72c6309f25e9660133007b82621ba7c') <NEW_LINE> variant('pic', default=True, description='Build with position independent code') <NEW_LINE> depends_on('cmake', when='@5.2.1:', type='build') <NEW_LINE> depends_on('blas') <NEW_LINE> def install(self, spec, prefix): <NEW_LINE> <INDENT> cmake_args = [ '-Denable_blaslib=OFF', '-DBLAS_blas_LIBRARY={0}'.format(spec['blas'].libs.joined()) ] <NEW_LINE> if '+pic' in spec: <NEW_LINE> <INDENT> cmake_args.extend([ '-DCMAKE_POSITION_INDEPENDENT_CODE=ON' ]) <NEW_LINE> <DEDENT> cmake_args.extend(std_cmake_args) <NEW_LINE> with working_dir('spack-build', create=True): <NEW_LINE> <INDENT> cmake('..', *cmake_args) <NEW_LINE> make() <NEW_LINE> make('install') <NEW_LINE> <DEDENT> <DEDENT> @when('@4.3') <NEW_LINE> def install(self, spec, prefix): <NEW_LINE> <INDENT> config = [] <NEW_LINE> config.extend([ 'PLAT = _x86_64', 'SuperLUroot = %s' % self.stage.source_path, 'SUPERLULIB = $(SuperLUroot)/lib/libsuperlu_{0}.a' .format(self.spec.version), 'BLASDEF = -DUSE_VENDOR_BLAS', 'BLASLIB = {0}'.format(spec['blas'].libs.ld_flags), 'TMGLIB = libtmglib.a', 'LIBS = $(SUPERLULIB) $(BLASLIB)', 'ARCH = ar', 'ARCHFLAGS = cr', 'RANLIB = {0}'.format('ranlib' if which('ranlib') else 'echo'), 'CC = {0}'.format(os.environ['CC']), 'FORTRAN = {0}'.format(os.environ['FC']), 'LOADER = {0}'.format(os.environ['CC']), 'CDEFS = -DAdd_' ]) <NEW_LINE> if '+pic' in spec: <NEW_LINE> <INDENT> config.extend([ 'CFLAGS = -O3 {0}'.format(self.compiler.pic_flag), 'NOOPTS = {0}'.format(self.compiler.pic_flag), 'FFLAGS = -O2 {0}'.format(self.compiler.pic_flag), 'LOADOPTS = {0}'.format(self.compiler.pic_flag) ]) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> config.extend([ 'CFLAGS = -O3', 'NOOPTS = ', 
'FFLAGS = -O2', 'LOADOPTS = ' ]) <NEW_LINE> <DEDENT> with open('make.inc', 'w') as inc: <NEW_LINE> <INDENT> for option in config: <NEW_LINE> <INDENT> inc.write('{0}\n'.format(option)) <NEW_LINE> <DEDENT> <DEDENT> make(parallel=False) <NEW_LINE> install_tree('lib', prefix.lib) <NEW_LINE> headers = glob.glob(join_path('SRC', '*.h')) <NEW_LINE> mkdir(prefix.include) <NEW_LINE> for h in headers: <NEW_LINE> <INDENT> install(h, prefix.include) | SuperLU is a general purpose library for the direct solution of large,
sparse, nonsymmetric systems of linear equations on high performance
machines. SuperLU is designed for sequential machines. | 6259907ca8370b77170f1e02 |
class TbModel(accelerators_model.TLineModel): <NEW_LINE> <INDENT> pv_module = _pvs_tb <NEW_LINE> model_module = pv_module.model <NEW_LINE> device_names = pv_module.device_names <NEW_LINE> prefix = device_names.section.upper() <NEW_LINE> database = pv_module.get_database() <NEW_LINE> nr_bunches = LiModel.nr_bunches <NEW_LINE> _downstream_accelerator_prefix = 'BO' <NEW_LINE> _delta_rx, _delta_angle = _pymodels.coordinate_system.parameters(prefix) | Definition of TB area structure. | 6259907c7d847024c075de11 |
class HttpVersionMatchConditionParameters(msrest.serialization.Model): <NEW_LINE> <INDENT> _validation = { 'odata_type': {'required': True, 'constant': True}, 'operator': {'required': True}, } <NEW_LINE> _attribute_map = { 'odata_type': {'key': '@odata\\.type', 'type': 'str'}, 'operator': {'key': 'operator', 'type': 'str'}, 'negate_condition': {'key': 'negateCondition', 'type': 'bool'}, 'match_values': {'key': 'matchValues', 'type': '[str]'}, } <NEW_LINE> odata_type = "#Microsoft.Azure.Cdn.Models.DeliveryRuleHttpVersionConditionParameters" <NEW_LINE> def __init__( self, **kwargs ): <NEW_LINE> <INDENT> super(HttpVersionMatchConditionParameters, self).__init__(**kwargs) <NEW_LINE> self.operator = kwargs['operator'] <NEW_LINE> self.negate_condition = kwargs.get('negate_condition', None) <NEW_LINE> self.match_values = kwargs.get('match_values', None) | Defines the parameters for HttpVersion match conditions.
Variables are only populated by the server, and will be ignored when sending a request.
All required parameters must be populated in order to send to Azure.
:ivar odata_type: Required. Default value:
"#Microsoft.Azure.Cdn.Models.DeliveryRuleHttpVersionConditionParameters".
:vartype odata_type: str
:param operator: Required. Describes operator to be matched. Possible values include: "Equal".
:type operator: str or ~azure.mgmt.cdn.models.HttpVersionOperator
:param negate_condition: Describes if this is negate condition or not.
:type negate_condition: bool
:param match_values: The match value for the condition of the delivery rule.
:type match_values: list[str] | 6259907c7047854f46340de8 |
class Bot: <NEW_LINE> <INDENT> def __init__(self, name, content): <NEW_LINE> <INDENT> self.name = name <NEW_LINE> functions = getSections(content) <NEW_LINE> self.functions = {} <NEW_LINE> for i in functions.keys(): <NEW_LINE> <INDENT> parts = getParts(functions[i]) <NEW_LINE> temp = {} <NEW_LINE> for j in parts.keys(): <NEW_LINE> <INDENT> temp.update({j:getList(parts[j])}) <NEW_LINE> <DEDENT> self.functions.update({i:temp}) <NEW_LINE> <DEDENT> <DEDENT> def getFunction(self, name): <NEW_LINE> <INDENT> return self.functions.get(name) <NEW_LINE> <DEDENT> def getFunctions(self): <NEW_LINE> <INDENT> return self.functions.keys() | Representaion of a Bot | 6259907c1b99ca400229024f |
class TranslateToLearnedQuestion(TranslateQuestion): <NEW_LINE> <INDENT> def _check_answer(self, current_answer): <NEW_LINE> <INDENT> return self._check_translated_word_answer(current_answer) <NEW_LINE> <DEDENT> def _get_question_word(self): <NEW_LINE> <INDENT> return self._current_word.native_word.get_most_common_spelling() <NEW_LINE> <DEDENT> def _get_answer_from_word(self, current_word): <NEW_LINE> <INDENT> return current_word.translated_word.get_most_common_spelling() | A translate question from native language to learned language. | 6259907c55399d3f05627f47 |
class Proc_Component_Attr_Spec(STRINGBase): <NEW_LINE> <INDENT> subclass_names = ['Access_Spec', 'Proc_Component_PASS_Arg_Name'] <NEW_LINE> def match(string): return STRINGBase.match(['POINTER','PASS','NOPASS'], string) <NEW_LINE> match = staticmethod(match) | <proc-component-attr-spec> = POINTER
| PASS [ ( <arg-name> ) ]
| NOPASS
| <access-spec> | 6259907c4f6381625f19a1c7 |
class FooController(Controller): <NEW_LINE> <INDENT> model = Instance(FooModel) <NEW_LINE> def _model_default(self): <NEW_LINE> <INDENT> return FooModel(my_str="meh") | Test dialog that does nothing useful. | 6259907c3d592f4c4edbc878 |
class Graph: <NEW_LINE> <INDENT> def __init__(self, edges=[], directed=True, root=None): <NEW_LINE> <INDENT> self.directed = directed <NEW_LINE> self.graph = {} <NEW_LINE> self.root = root <NEW_LINE> if root: <NEW_LINE> <INDENT> self.graph[tuple(root)] = set([]) <NEW_LINE> <DEDENT> self.add_edges_from(edges) <NEW_LINE> <DEDENT> def add_edges_from(self, edges): <NEW_LINE> <INDENT> for node1, node2 in edges: <NEW_LINE> <INDENT> self.add_edge(node1,node2) <NEW_LINE> <DEDENT> <DEDENT> def add_edge(self, node1, node2): <NEW_LINE> <INDENT> def _add(n1, n2): <NEW_LINE> <INDENT> n1 = tuple(n1) <NEW_LINE> n2 = tuple(n2) <NEW_LINE> if n1 in self.graph.keys(): <NEW_LINE> <INDENT> self.graph[n1].add(n2) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> self.graph[n1] = set([n2]) <NEW_LINE> <DEDENT> <DEDENT> _add(node1, node2) <NEW_LINE> if not self.directed: <NEW_LINE> <INDENT> _add(node2, node1) | This class implements a simple graph structure. | 6259907c76e4537e8c3f0fb3 |
class SapOpenHubTableDataset(Dataset): <NEW_LINE> <INDENT> _validation = { 'type': {'required': True}, 'linked_service_name': {'required': True}, 'open_hub_destination_name': {'required': True}, } <NEW_LINE> _attribute_map = { 'additional_properties': {'key': '', 'type': '{object}'}, 'type': {'key': 'type', 'type': 'str'}, 'description': {'key': 'description', 'type': 'str'}, 'structure': {'key': 'structure', 'type': 'object'}, 'schema': {'key': 'schema', 'type': 'object'}, 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, 'annotations': {'key': 'annotations', 'type': '[object]'}, 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, 'open_hub_destination_name': {'key': 'typeProperties.openHubDestinationName', 'type': 'object'}, 'exclude_last_request': {'key': 'typeProperties.excludeLastRequest', 'type': 'object'}, 'base_request_id': {'key': 'typeProperties.baseRequestId', 'type': 'object'}, } <NEW_LINE> def __init__( self, **kwargs ): <NEW_LINE> <INDENT> super(SapOpenHubTableDataset, self).__init__(**kwargs) <NEW_LINE> self.type = 'SapOpenHubTable' <NEW_LINE> self.open_hub_destination_name = kwargs['open_hub_destination_name'] <NEW_LINE> self.exclude_last_request = kwargs.get('exclude_last_request', None) <NEW_LINE> self.base_request_id = kwargs.get('base_request_id', None) | Sap Business Warehouse Open Hub Destination Table properties.
All required parameters must be populated in order to send to Azure.
:param additional_properties: Unmatched properties from the message are deserialized to this
collection.
:type additional_properties: dict[str, object]
:param type: Required. Type of dataset.Constant filled by server.
:type type: str
:param description: Dataset description.
:type description: str
:param structure: Columns that define the structure of the dataset. Type: array (or Expression
with resultType array), itemType: DatasetDataElement.
:type structure: object
:param schema: Columns that define the physical type schema of the dataset. Type: array (or
Expression with resultType array), itemType: DatasetSchemaDataElement.
:type schema: object
:param linked_service_name: Required. Linked service reference.
:type linked_service_name: ~azure.synapse.artifacts.models.LinkedServiceReference
:param parameters: Parameters for dataset.
:type parameters: dict[str, ~azure.synapse.artifacts.models.ParameterSpecification]
:param annotations: List of tags that can be used for describing the Dataset.
:type annotations: list[object]
:param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the
root level.
:type folder: ~azure.synapse.artifacts.models.DatasetFolder
:param open_hub_destination_name: Required. The name of the Open Hub Destination with
destination type as Database Table. Type: string (or Expression with resultType string).
:type open_hub_destination_name: object
:param exclude_last_request: Whether to exclude the records of the last request. The default
value is true. Type: boolean (or Expression with resultType boolean).
:type exclude_last_request: object
:param base_request_id: The ID of request for delta loading. Once it is set, only data with
requestId larger than the value of this property will be retrieved. The default value is 0.
Type: integer (or Expression with resultType integer ).
:type base_request_id: object | 6259907c67a9b606de5477c0 |
@pytest.mark.usefixtures('db') <NEW_LINE> class TestUser: <NEW_LINE> <INDENT> def test_get_by_id(self): <NEW_LINE> <INDENT> user = create_user('[email protected]') <NEW_LINE> retrieved = get_user_by_id(user.id) <NEW_LINE> assert retrieved == user <NEW_LINE> <DEDENT> def test_created_at_defaults_to_datetime(self): <NEW_LINE> <INDENT> user = create_user(email='[email protected]') <NEW_LINE> assert bool(user.created_at) <NEW_LINE> assert isinstance(user.created_at, datetime.datetime) <NEW_LINE> <DEDENT> def test_password_is_nullable(self): <NEW_LINE> <INDENT> user = create_user(email='[email protected]') <NEW_LINE> assert user.password is None <NEW_LINE> <DEDENT> def test_factory(self, db): <NEW_LINE> <INDENT> user = UserFactory(password='myprecious') <NEW_LINE> db.session.commit() <NEW_LINE> assert bool(user.email) <NEW_LINE> assert bool(user.created_at) <NEW_LINE> assert user.is_admin is False <NEW_LINE> assert user.active is True <NEW_LINE> assert user.check_password('myprecious') <NEW_LINE> <DEDENT> def test_check_password(self): <NEW_LINE> <INDENT> user = create_user(email='[email protected]', password='foobarbaz123') <NEW_LINE> assert user.check_password('foobarbaz123') is True <NEW_LINE> assert user.check_password('lajfd') is False <NEW_LINE> <DEDENT> def test_full_name(self): <NEW_LINE> <INDENT> user = UserFactory(first_name='Foo', last_name='Bar') <NEW_LINE> assert user.full_name == 'Foo Bar' <NEW_LINE> <DEDENT> def test_string_representation(self): <NEW_LINE> <INDENT> user = UserFactory(email="[email protected]") <NEW_LINE> user.save() <NEW_LINE> assert str(user) == '[email protected]' <NEW_LINE> <DEDENT> def test_is_active(self): <NEW_LINE> <INDENT> user = UserFactory(active=True) <NEW_LINE> user.save() <NEW_LINE> assert user.is_active <NEW_LINE> <DEDENT> def test_is_anonymous(self): <NEW_LINE> <INDENT> user = UserFactory() <NEW_LINE> user.save() <NEW_LINE> assert user.is_anonymous is False <NEW_LINE> <DEDENT> def test_is_authenticated_is_true(self): 
<NEW_LINE> <INDENT> user = UserFactory() <NEW_LINE> user.save() <NEW_LINE> assert user.is_authenticated is True | User tests. | 6259907c56b00c62f0fb4308 |
class FastTemporaryFile(object): <NEW_LINE> <INDENT> __slots__ = ['_stream', '_isFile', '_smallFileSize'] <NEW_LINE> def __init__(self, smallFileSize=65536): <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> import cStringIO as StringIO <NEW_LINE> <DEDENT> except ImportError: <NEW_LINE> <INDENT> import StringIO <NEW_LINE> <DEDENT> self._stream = StringIO.StringIO() <NEW_LINE> self._isFile = False <NEW_LINE> self._smallFileSize = int(smallFileSize) <NEW_LINE> <DEDENT> def write(self, data): <NEW_LINE> <INDENT> if not self._isFile: <NEW_LINE> <INDENT> if self._stream.tell() + len(data) > self._smallFileSize: <NEW_LINE> <INDENT> from tempfile import TemporaryFile <NEW_LINE> fstream = TemporaryFile() <NEW_LINE> self._stream.seek(0) <NEW_LINE> fstream.writelines(self._stream) <NEW_LINE> self._stream.close() <NEW_LINE> self._stream = fstream <NEW_LINE> self._isFile = True <NEW_LINE> <DEDENT> <DEDENT> self._stream.write(data) <NEW_LINE> <DEDENT> def __getattribute__(self, name): <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> return object.__getattribute__(self, name) <NEW_LINE> <DEDENT> except AttributeError: <NEW_LINE> <INDENT> stream = object.__getattribute__(self, '_stream') <NEW_LINE> return getattr(stream, name) | A file-like stream object for writing temporary data efficiently.
Stores the file contents in-memory for small files and uses a
temporary file on the file system for large files (the temporary
file is cleaned up when the stream is closed). You may read/write from
this file as you would a normal file object. | 6259907cd486a94d0ba2d9ec |
class UnionPayRefundSerializer(serializers.Serializer): <NEW_LINE> <INDENT> accNo = serializers.CharField() <NEW_LINE> accessType = serializers.CharField() <NEW_LINE> currencyCode = serializers.CharField() <NEW_LINE> encoding = serializers.CharField() <NEW_LINE> merId = serializers.CharField() <NEW_LINE> orderId = serializers.CharField() <NEW_LINE> origQryId = serializers.CharField() <NEW_LINE> payCardType = serializers.CharField() <NEW_LINE> payType = serializers.CharField() <NEW_LINE> queryId = serializers.CharField() <NEW_LINE> respCode = serializers.CharField() <NEW_LINE> respMsg = serializers.CharField() <NEW_LINE> settleAmt = serializers.CharField() <NEW_LINE> settleCurrencyCode = serializers.CharField() <NEW_LINE> settleDate = serializers.CharField() <NEW_LINE> signMethod = serializers.CharField() <NEW_LINE> signPubKeyCert = serializers.CharField() <NEW_LINE> traceNo = serializers.CharField() <NEW_LINE> traceTime = serializers.CharField() <NEW_LINE> txnAmt = serializers.CharField() <NEW_LINE> txnSubType = serializers.CharField() <NEW_LINE> txnTime = serializers.CharField() <NEW_LINE> txnType = serializers.CharField() <NEW_LINE> version = serializers.CharField() <NEW_LINE> def create(self, validated_data): <NEW_LINE> <INDENT> refund_id = validated_data.get('orderId') <NEW_LINE> unionpay_refund_id = validated_data.get('queryId') <NEW_LINE> unionpay_payment_id = validated_data.get('origQryId') <NEW_LINE> unionpay_payment_card = validated_data.get('accNo') <NEW_LINE> merchant_id = validated_data.get('merId') <NEW_LINE> refund_count = int(validated_data.get('settleAmt')) <NEW_LINE> refund_time = validated_data.get('txnTime') <NEW_LINE> unionpay_refund = UnionPayRefund( refund_id=refund_id, unionpay_refund_id=unionpay_refund_id, unionpay_payment_id=unionpay_payment_id, unionpay_payment_card=unionpay_payment_card, merchant_id=merchant_id, refund_count=refund_count, refund_time=refund_time, ) <NEW_LINE> try: <NEW_LINE> <INDENT> refund = 
Refund.objects.get(refund_id=refund_id) <NEW_LINE> <DEDENT> except Refund.DoesNotExist: <NEW_LINE> <INDENT> return unionpay_refund <NEW_LINE> <DEDENT> refund.unionpay_refund_id = unionpay_refund_id <NEW_LINE> refund.refund_time = refund_time <NEW_LINE> refund.refund_status = 's' <NEW_LINE> with transaction.atomic(): <NEW_LINE> <INDENT> unionpay_refund.save() <NEW_LINE> refund.save() <NEW_LINE> <DEDENT> return unionpay_refund | 银联退款单返回 | 6259907c5fdd1c0f98e5f9b4 |
class Job(Thread): <NEW_LINE> <INDENT> def __init__(self, function, *args, **kargs): <NEW_LINE> <INDENT> Thread.__init__(self) <NEW_LINE> self.daemon = True <NEW_LINE> self.function = function <NEW_LINE> self.args = args <NEW_LINE> self.kargs = kargs <NEW_LINE> self.error = None <NEW_LINE> <DEDENT> def run(self): <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> self.function(*self.args, **self.kargs) <NEW_LINE> <DEDENT> except Exception as e: <NEW_LINE> <INDENT> self.error = e <NEW_LINE> <DEDENT> <DEDENT> def join(self): <NEW_LINE> <INDENT> Thread.join(self) <NEW_LINE> if self.error != None: <NEW_LINE> <INDENT> raise self.error | Threaded job class | 6259907ca05bb46b3848be43 |
class Subject(models.Model): <NEW_LINE> <INDENT> name = models.CharField(max_length=200) <NEW_LINE> begin_date = models.IntegerField() <NEW_LINE> end_date = models.IntegerField() | Subject is an entity which controls global time limits of class
and connects its name to Cell
Fields:
name (str): name of the subject
begn_date, end_date (int): epoch seconds, when this or that class
is being held | 6259907c97e22403b383c936 |
class Record(object): <NEW_LINE> <INDENT> def __init__(self, data=None, permissions=None): <NEW_LINE> <INDENT> self.swagger_types = { 'data': 'object', 'permissions': 'RecordPermissions' } <NEW_LINE> self.attribute_map = { 'data': 'data', 'permissions': 'permissions' } <NEW_LINE> self._data = data <NEW_LINE> self._permissions = permissions <NEW_LINE> <DEDENT> @property <NEW_LINE> def data(self): <NEW_LINE> <INDENT> return self._data <NEW_LINE> <DEDENT> @data.setter <NEW_LINE> def data(self, data): <NEW_LINE> <INDENT> self._data = data <NEW_LINE> <DEDENT> @property <NEW_LINE> def permissions(self): <NEW_LINE> <INDENT> return self._permissions <NEW_LINE> <DEDENT> @permissions.setter <NEW_LINE> def permissions(self, permissions): <NEW_LINE> <INDENT> self._permissions = permissions <NEW_LINE> <DEDENT> def to_dict(self): <NEW_LINE> <INDENT> result = {} <NEW_LINE> for attr, _ in iteritems(self.swagger_types): <NEW_LINE> <INDENT> value = getattr(self, attr) <NEW_LINE> if isinstance(value, list): <NEW_LINE> <INDENT> result[attr] = list(map( lambda x: x.to_dict() if hasattr(x, "to_dict") else x, value )) <NEW_LINE> <DEDENT> elif hasattr(value, "to_dict"): <NEW_LINE> <INDENT> result[attr] = value.to_dict() <NEW_LINE> <DEDENT> elif isinstance(value, dict): <NEW_LINE> <INDENT> result[attr] = dict(map( lambda item: (item[0], item[1].to_dict()) if hasattr(item[1], "to_dict") else item, value.items() )) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> result[attr] = value <NEW_LINE> <DEDENT> <DEDENT> return result <NEW_LINE> <DEDENT> def to_str(self): <NEW_LINE> <INDENT> return pformat(self.to_dict()) <NEW_LINE> <DEDENT> def __repr__(self): <NEW_LINE> <INDENT> return self.to_str() <NEW_LINE> <DEDENT> def __eq__(self, other): <NEW_LINE> <INDENT> return self.__dict__ == other.__dict__ <NEW_LINE> <DEDENT> def __ne__(self, other): <NEW_LINE> <INDENT> return not self == other | NOTE: This class is auto generated by the swagger code generator program.
Do not edit the class manually. | 6259907c283ffb24f3cf52d6 |
class AudioRecord(): <NEW_LINE> <INDENT> def __init__(self, filename="output.wav", interval=1): <NEW_LINE> <INDENT> self.outputFilename = filename <NEW_LINE> self.rate = 22050 <NEW_LINE> self.interval = interval <NEW_LINE> self.chunk = 1024 <NEW_LINE> self.format = pyaudio.paInt16 <NEW_LINE> self.channels = 1 <NEW_LINE> self.port = None <NEW_LINE> self.stream = None <NEW_LINE> <DEDENT> def begin_audio(self): <NEW_LINE> <INDENT> if self.stream is None: <NEW_LINE> <INDENT> self.port = pyaudio.PyAudio() <NEW_LINE> self.rate = int(self.port.get_device_info_by_index(0)['defaultSampleRate']) <NEW_LINE> self.chunk = int(self.interval * self.rate) <NEW_LINE> self.stream = self.port.open(format=self.format, channels=self.channels, rate=self.rate, input=True, frames_per_buffer=self.chunk) <NEW_LINE> self.stream.stop_stream() <NEW_LINE> <DEDENT> <DEDENT> def get_data_from_audio(self): <NEW_LINE> <INDENT> data_array = numpy.zeros(self.chunk, dtype=numpy.float) <NEW_LINE> data_stream = None <NEW_LINE> if self.stream is not None: <NEW_LINE> <INDENT> self.stream.start_stream() <NEW_LINE> data_stream = self.stream.read(self.chunk) <NEW_LINE> data_array = (numpy.fromstring(data_stream, dtype=numpy.int16) / 32768.0) * 5.0 <NEW_LINE> <DEDENT> return data_stream, data_array <NEW_LINE> <DEDENT> def end_audio(self): <NEW_LINE> <INDENT> if self.stream: <NEW_LINE> <INDENT> self.stream.stop_stream() <NEW_LINE> self.stream.close() <NEW_LINE> self.stream = None <NEW_LINE> <DEDENT> try: <NEW_LINE> <INDENT> self.port.terminate() <NEW_LINE> <DEDENT> except AttributeError: <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> self.port = None <NEW_LINE> <DEDENT> <DEDENT> def save_wave(self, frames): <NEW_LINE> <INDENT> wf = wave.open(self.outputFilename, 'wb') <NEW_LINE> wf.setnchannels(self.channels) <NEW_LINE> wf.setsampwidth(self.port.get_sample_size(self.format)) <NEW_LINE> wf.setframe(self.rate) <NEW_LINE> wf.writeframes(b''.join(frames)) <NEW_LINE> wf.close() | Class to 
record audio data. | 6259907c71ff763f4b5e91e1 |
class CreateRatingSerializer(serializers.ModelSerializer): <NEW_LINE> <INDENT> class Meta: <NEW_LINE> <INDENT> model = Rating <NEW_LINE> fields = ("star", "product") <NEW_LINE> <DEDENT> def create(self, validated_data): <NEW_LINE> <INDENT> rating, _ = Rating.objects.update_or_create( ip=validated_data.get('ip', None), product=validated_data.get('product', None), defaults={'star': validated_data.get("star")} ) <NEW_LINE> return rating | Добавление рейтинга пользователем | 6259907c7047854f46340dea |
class InvalidInputError(Exception): <NEW_LINE> <INDENT> pass | raised when invalid input is received | 6259907ca8370b77170f1e05 |
class Browser(StaticLiveServerTestCase): <NEW_LINE> <INDENT> port = 12345 <NEW_LINE> fixtures = [ "page_fixtures.json", "footer_fixtures.json", "social_fixtures.json", "alert_fixtures.json", "button_fixtures.json", "placeholder_fixtures.json", "aboutapp_fixtures.json", "portfolioapp_fixtures.json", "blogapp_fixtures.json", "contact_fixtures.json"] <NEW_LINE> @classmethod <NEW_LINE> def setUpClass(cls): <NEW_LINE> <INDENT> super().setUpClass() <NEW_LINE> cls.selenium = WebDriver() <NEW_LINE> cls.selenium.implicitly_wait(10) <NEW_LINE> <DEDENT> @classmethod <NEW_LINE> def tearDownClass(cls): <NEW_LINE> <INDENT> cls.selenium.quit() <NEW_LINE> super().tearDownClass() | Browser for the tests | 6259907c76e4537e8c3f0fb5 |
class STSHI(BinaryStream): <NEW_LINE> <INDENT> def __init__(self, bytes, mainStream, offset, size): <NEW_LINE> <INDENT> BinaryStream.__init__(self, bytes, mainStream=mainStream) <NEW_LINE> self.pos = offset <NEW_LINE> self.size = size <NEW_LINE> <DEDENT> def dump(self): <NEW_LINE> <INDENT> print('<stshi type="STSHI" offset="%d" size="%d bytes">' % (self.pos, self.size)) <NEW_LINE> posOrig = self.pos <NEW_LINE> self.stshif = Stshif(self.bytes, self.mainStream, self.pos) <NEW_LINE> self.stshif.dump() <NEW_LINE> self.pos += self.stshif.size <NEW_LINE> if self.pos - posOrig < self.size: <NEW_LINE> <INDENT> self.printAndSet("ftcBi", self.readuInt16()) <NEW_LINE> if self.pos - posOrig < self.size: <NEW_LINE> <INDENT> stshiLsd = StshiLsd(self.bytes, self, self.pos) <NEW_LINE> stshiLsd.dump() <NEW_LINE> <DEDENT> <DEDENT> print('</stshi>') | The STSHI structure specifies general stylesheet and related information. | 6259907c67a9b606de5477c1 |
class QuerySinglePaymentResultResponse(AbstractModel): <NEW_LINE> <INDENT> def __init__(self): <NEW_LINE> <INDENT> self.ErrCode = None <NEW_LINE> self.ErrMessage = None <NEW_LINE> self.Result = None <NEW_LINE> self.RequestId = None <NEW_LINE> <DEDENT> def _deserialize(self, params): <NEW_LINE> <INDENT> self.ErrCode = params.get("ErrCode") <NEW_LINE> self.ErrMessage = params.get("ErrMessage") <NEW_LINE> if params.get("Result") is not None: <NEW_LINE> <INDENT> self.Result = QuerySinglePaymentResultData() <NEW_LINE> self.Result._deserialize(params.get("Result")) <NEW_LINE> <DEDENT> self.RequestId = params.get("RequestId") | QuerySinglePaymentResult返回参数结构体
| 6259907c7d43ff2487428130 |
class JavaClassPath: <NEW_LINE> <INDENT> def __init__(self, class_path=None): <NEW_LINE> <INDENT> self.package = JavaPackage() <NEW_LINE> self.jclass = JavaClass() <NEW_LINE> if isinstance(class_path, str) and class_path: <NEW_LINE> <INDENT> match = RE().match("class_path_match", class_path) <NEW_LINE> if match: <NEW_LINE> <INDENT> self.package = JavaPackage( JavaUtils().normalize_package_path( match.group(1) ).split(".") ) <NEW_LINE> self.jclass = JavaClass( JavaUtils().normalize_package_path(match.group(3)) ) <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> def get_package(self): <NEW_LINE> <INDENT> return self.package <NEW_LINE> <DEDENT> def get_class(self): <NEW_LINE> <INDENT> return self.jclass <NEW_LINE> <DEDENT> def as_path(self): <NEW_LINE> <INDENT> return os.path.join(self.package.as_path(), self.jclass.get()) <NEW_LINE> <DEDENT> def as_class_path(self): <NEW_LINE> <INDENT> if self.package.is_empty(): <NEW_LINE> <INDENT> return self.jclass.get() <NEW_LINE> <DEDENT> elif self.jclass.is_empty(): <NEW_LINE> <INDENT> return self.package.as_class_path() <NEW_LINE> <DEDENT> return ".".join([ x for x in [self.package.as_class_path(), self.jclass.get()] if x ]) | A class represents a Java class path | 6259907c97e22403b383c937 |
class PyWinCFFIError(Exception): <NEW_LINE> <INDENT> pass | The base class for all custom exceptions that pywincffi can throw. | 6259907c32920d7e50bc7a79 |
class ListKeysVerb(ListEnclavesVerb): <NEW_LINE> <INDENT> def main(self, *, args) -> int: <NEW_LINE> <INDENT> warnings.warn( 'list_keys is deprecated and will be removed in a future release. Use list_enclaves ' 'instead.', FutureWarning) <NEW_LINE> return super().main(args=args) | DEPRECATED: List enclaves in keystore. Use list_enclaves instead. | 6259907c4a966d76dd5f091d |
class TestPtBr(_SimpleAutomotiveTestMixin): <NEW_LINE> <INDENT> license_plate_pattern: Pattern = re.compile(r"[A-Z]{3}-\d{4}") | Test pt_BR automotive provider methods | 6259907c7cff6e4e811b7477 |
class UserUnReadNotifications(ListAPIView): <NEW_LINE> <INDENT> permission_classes = (IsAuthenticatedOrReadOnly,) <NEW_LINE> serializer_class = NotificationSerializer <NEW_LINE> def get(self, request): <NEW_LINE> <INDENT> serializer_context = {'request': request} <NEW_LINE> user = request.user <NEW_LINE> if user.is_subscribed: <NEW_LINE> <INDENT> queryset = user.notifications.unread() <NEW_LINE> serializer = self.serializer_class( queryset, context=serializer_context, many=True) <NEW_LINE> return Response(serializer.data, status=status.HTTP_200_OK) <NEW_LINE> <DEDENT> return Response( {'message': 'you are not subscribed to notifications'}, status=status.HTTP_200_OK) | Get all unread notifications class | 6259907c283ffb24f3cf52d8 |
class TenantsCommand(base.ListCommand): <NEW_LINE> <INDENT> columns = ['domain_count', 'id'] <NEW_LINE> def execute(self, parsed_args): <NEW_LINE> <INDENT> return self.client.reports.tenants_all() | Get list of tenants and domain count for each | 6259907caad79263cf4301f1 |
class Plant(Transparent): <NEW_LINE> <INDENT> def makeObject(self, x, y, z, metadata): <NEW_LINE> <INDENT> mesh = bpy.data.meshes.new(name="Block") <NEW_LINE> mesh.from_pydata([[-0.5,-0.5,-0.5],[0.5,-0.5,-0.5],[-0.5,0.5,-0.5],[0.5,0.5,-0.5],[-0.5,-0.5,0.5],[0.5,-0.5,0.5],[-0.5,0.5,0.5],[0.5,0.5,0.5]], [], [[0,3,7,4], [1,2,6,5]]) <NEW_LINE> mesh.update() <NEW_LINE> obj = bpy.data.objects.new("Block", mesh) <NEW_LINE> obj.location.x = x + 0.5 <NEW_LINE> obj.location.y = y + 0.5 <NEW_LINE> obj.location.z = z + 0.5 <NEW_LINE> obj.blockId = self._id <NEW_LINE> obj.blockMetadata = metadata <NEW_LINE> bpy.context.scene.objects.link(obj) <NEW_LINE> activeObject = bpy.context.scene.objects.active <NEW_LINE> bpy.context.scene.objects.active = obj <NEW_LINE> bpy.ops.object.mode_set(mode='EDIT') <NEW_LINE> bpy.ops.mesh.select_all(action='SELECT') <NEW_LINE> bpy.ops.mesh.normals_make_consistent(inside=False) <NEW_LINE> bpy.ops.object.editmode_toggle() <NEW_LINE> bpy.context.scene.objects.active = activeObject <NEW_LINE> return obj <NEW_LINE> <DEDENT> def makeUVMap(self, obj, metadata): <NEW_LINE> <INDENT> obj.data.uv_textures.new(); | Plants that are X shaped | 6259907c009cb60464d02f77 |
class CategoryTypeViewSet(viewsets.ReadOnlyModelViewSet): <NEW_LINE> <INDENT> queryset = CategoryType.types.all() <NEW_LINE> serializer_class = CategoryTypeSerializer | This viewset automatically provides `list`, `detail` actions. | 6259907c7b180e01f3e49d81 |
class ListCreateUsers(ListCreateResource): <NEW_LINE> <INDENT> login_required = True <NEW_LINE> model = User <NEW_LINE> filterable_fields = () <NEW_LINE> searchable_fields = ('name',) <NEW_LINE> serializer_class = UserSerializer <NEW_LINE> def perform_create(self,req,db,posted_data): <NEW_LINE> <INDENT> tenant_id = self.get_auth_tenant_id(req) <NEW_LINE> print (tenant_id) <NEW_LINE> email = posted_data.get("email") <NEW_LINE> phone_number = posted_data.get("phone_number") <NEW_LINE> organization_id = posted_data.pop("organization_id", None) <NEW_LINE> if not phone_number and not email: <NEW_LINE> <INDENT> raise falcon.HTTPBadRequest(title="Missing Field", description="either phone_number or email field is needed") <NEW_LINE> <DEDENT> raw_password = self.model.get_random_password() <NEW_LINE> print (raw_password) <NEW_LINE> posted_data.update({"tenant_id": tenant_id}) <NEW_LINE> user = db.objects( self.model.insert() ).create(**posted_data) <NEW_LINE> user_id = user.get("id") <NEW_LINE> self.model.set_password(db =db , user_id = user_id, password = raw_password) <NEW_LINE> tenant = db.objects( Tenant.get( pk=tenant_id) ).fetch()[0] <NEW_LINE> if 'B2B' == tenant.get("business_mode"): <NEW_LINE> <INDENT> if not organization_id: <NEW_LINE> <INDENT> raise falcon.HTTPBadRequest( title="Organization is required for B2B business mode", description="Either organization_id field is required." 
) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> db.objects( OrganizationUser.insert() ).create(**{"organization_id": organization_id,"user_id":user_id}) <NEW_LINE> <DEDENT> <DEDENT> if email: <NEW_LINE> <INDENT> provider = db.objects( EmailProvider.gmail() ).filter(tenant_id__eq=tenant_id).fetch()[0] <NEW_LINE> template = db.objects( EmailTemplate.account_created() ).filter(tenant_id__eq=tenant_id).fetch()[0] <NEW_LINE> send_gmail.delay(provider,template, recipient=email, body_replace_params = {"password":raw_password}) <NEW_LINE> <DEDENT> return user | We expect client_id to be passed for non authorized logins. | 6259907c5fdd1c0f98e5f9b7 |
class City(BaseModel, Base): <NEW_LINE> <INDENT> __tablename__ = "cities" <NEW_LINE> state_id = Column(String(60), ForeignKey('states.id'), nullable=False) <NEW_LINE> name = Column(String(128), nullable=False) <NEW_LINE> places = relationship('Place', backref='cities', cascade='all, delete, delete-orphan') | Define the class City that inherits from BaseModel. | 6259907c5166f23b2e244e10 |
class KeychainMetric(Metric): <NEW_LINE> <INDENT> DISPLAY_NAME = 'OSX_Keychain_Access' <NEW_LINE> HISTOGRAM_NAME = 'OSX.Keychain.Access' <NEW_LINE> @staticmethod <NEW_LINE> def _CheckKeychainConfiguration(): <NEW_LINE> <INDENT> warning_suffix = ('which will cause some Telemetry tests to stall when run' ' on a headless machine (e.g. perf bot).') <NEW_LINE> if keychain_helper.IsKeychainLocked(): <NEW_LINE> <INDENT> logging.warning('The default keychain is locked, %s', warning_suffix) <NEW_LINE> <DEDENT> if keychain_helper.DoesKeychainHaveTimeout(): <NEW_LINE> <INDENT> logging.warning('The default keychain is configured to automatically' ' lock itself have a period of time, %s', warning_suffix) <NEW_LINE> <DEDENT> chrome_acl_configured = (keychain_helper. IsKeychainConfiguredForBotsWithChrome()) <NEW_LINE> chromium_acl_configured = (keychain_helper. IsKeychainConfiguredForBotsWithChromium()) <NEW_LINE> acl_warning = ('A commonly used %s key stored in the default keychain does' ' not give decryption access to all applications, %s') <NEW_LINE> if not chrome_acl_configured: <NEW_LINE> <INDENT> logging.warning(acl_warning, 'Chrome', warning_suffix) <NEW_LINE> <DEDENT> if not chromium_acl_configured: <NEW_LINE> <INDENT> logging.warning(acl_warning, 'Chromium', warning_suffix) <NEW_LINE> <DEDENT> <DEDENT> @classmethod <NEW_LINE> def CustomizeBrowserOptions(cls, options): <NEW_LINE> <INDENT> if sys.platform != 'darwin': <NEW_LINE> <INDENT> return <NEW_LINE> <DEDENT> KeychainMetric._CheckKeychainConfiguration() <NEW_LINE> options.AppendExtraBrowserArgs(['--enable-stats-collection-bindings']) <NEW_LINE> <DEDENT> def AddResults(self, tab, results): <NEW_LINE> <INDENT> if sys.platform != 'darwin': <NEW_LINE> <INDENT> return <NEW_LINE> <DEDENT> access_count = histogram_util.GetHistogramSum( histogram_util.BROWSER_HISTOGRAM, KeychainMetric.HISTOGRAM_NAME, tab) <NEW_LINE> results.AddValue(scalar.ScalarValue( results.current_page, KeychainMetric.DISPLAY_NAME, 'count', access_count)) 
| KeychainMetric gathers keychain statistics from the browser object.
This includes the number of times that the keychain was accessed. | 6259907c091ae35668706677 |
class IllegalConfig(BoggartException): <NEW_LINE> <INDENT> def __init__(self, reason: str) -> None: <NEW_LINE> <INDENT> super().__init__(reason) | Used to indicate that a given configuration is syntatically correct but
that it describes an illegal configuration. | 6259907c4f88993c371f123e |
class SheetStatus(Enum): <NEW_LINE> <INDENT> draft = '0' <NEW_LINE> submit = '1' <NEW_LINE> recall = '2' <NEW_LINE> approve = '5' <NEW_LINE> discard = '9' | 单据状态 | 6259907c167d2b6e312b82b0 |
class ImplicationExpert(object): <NEW_LINE> <INDENT> def __init__(self, formal_context): <NEW_LINE> <INDENT> self.formal_context = formal_context <NEW_LINE> self.implication_query_count = 0 <NEW_LINE> <DEDENT> def is_valid(self, implication): <NEW_LINE> <INDENT> return self.provide_counterexample(implication) is None <NEW_LINE> <DEDENT> def provide_counterexample(self, implication): <NEW_LINE> <INDENT> self.implication_query_count += 1 <NEW_LINE> for obj in self.formal_context.intents(): <NEW_LINE> <INDENT> if not implication.is_respected(obj): <NEW_LINE> <INDENT> return obj <NEW_LINE> <DEDENT> <DEDENT> return None | Answers to two types of queries:
is_valid,
request of counterexample. | 6259907ca8370b77170f1e08 |
class Edge: <NEW_LINE> <INDENT> __slots__ = '_origin', '_destination' <NEW_LINE> def __init__(self, u, v): <NEW_LINE> <INDENT> self._origin = u <NEW_LINE> self._destination = v <NEW_LINE> <DEDENT> def endpoints(self): <NEW_LINE> <INDENT> return (self._origin, self._destination) <NEW_LINE> <DEDENT> def opposite(self, v): <NEW_LINE> <INDENT> if not isinstance(v, Graph.Vertex): <NEW_LINE> <INDENT> raise TypeError('v must be a Vertex') <NEW_LINE> <DEDENT> if v is self._origin: <NEW_LINE> <INDENT> return self._destination <NEW_LINE> <DEDENT> elif v is self._destination: <NEW_LINE> <INDENT> return self._origin <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> raise ValueError('v not incident to edge') <NEW_LINE> <DEDENT> <DEDENT> def next_edge(self, lnk_list): <NEW_LINE> <INDENT> current = lnk_list.search(self) <NEW_LINE> next_loc = current.get_next() <NEW_LINE> if next_loc != None: <NEW_LINE> <INDENT> return next_loc.get_data() <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> return None <NEW_LINE> <DEDENT> <DEDENT> def __hash__(self): <NEW_LINE> <INDENT> return hash( (self._origin, self._destination) ) <NEW_LINE> <DEDENT> def __str__(self): <NEW_LINE> <INDENT> return '({0}<->{1})'.format(self._origin,self._destination) | Lightweight edge structure for a graph. | 6259907c97e22403b383c93a |
class TestListDirectoryTSKWindows(base.TestVFSPathExists): <NEW_LINE> <INDENT> platforms = ["Windows"] <NEW_LINE> flow = filesystem.ListDirectory.__name__ <NEW_LINE> args = { "pathspec": rdf_paths.PathSpec( path="C:\\Windows", pathtype=rdf_paths.PathSpec.PathType.TSK) } <NEW_LINE> test_output_path = "/fs/tsk/.*/C:/Windows/regedit.exe" <NEW_LINE> def CheckFlow(self): <NEW_LINE> <INDENT> found = False <NEW_LINE> filename = self.test_output_path.split("/")[-1] <NEW_LINE> for windir in ["Windows", "WINDOWS"]: <NEW_LINE> <INDENT> urn = self.client_id.Add("/fs/tsk") <NEW_LINE> fd = aff4.FACTORY.Open(urn, mode="r", token=self.token) <NEW_LINE> volumes = list(fd.OpenChildren()) <NEW_LINE> for volume in volumes: <NEW_LINE> <INDENT> fd = aff4.FACTORY.Open( volume.urn.Add(windir), mode="r", token=self.token) <NEW_LINE> children = list(fd.OpenChildren()) <NEW_LINE> for child in children: <NEW_LINE> <INDENT> if filename == child.urn.Basename(): <NEW_LINE> <INDENT> found = True <NEW_LINE> self.delete_urns.add(child.urn.Add(filename)) <NEW_LINE> self.delete_urns.add(child.urn) <NEW_LINE> break <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> <DEDENT> self.assertTrue(found) | Tests if ListDirectory works on Windows using Sleuthkit. | 6259907c796e427e538501b3 |
class Stats(ProcessEvent): <NEW_LINE> <INDENT> def my_init(self): <NEW_LINE> <INDENT> self._start_time = time.time() <NEW_LINE> self._stats = {} <NEW_LINE> self._stats_lock = threading.Lock() <NEW_LINE> <DEDENT> def process_default(self, event): <NEW_LINE> <INDENT> self._stats_lock.acquire() <NEW_LINE> try: <NEW_LINE> <INDENT> events = event.maskname.split('|') <NEW_LINE> for event_name in events: <NEW_LINE> <INDENT> count = self._stats.get(event_name, 0) <NEW_LINE> self._stats[event_name] = count + 1 <NEW_LINE> <DEDENT> <DEDENT> finally: <NEW_LINE> <INDENT> self._stats_lock.release() <NEW_LINE> <DEDENT> <DEDENT> def _stats_copy(self): <NEW_LINE> <INDENT> self._stats_lock.acquire() <NEW_LINE> try: <NEW_LINE> <INDENT> return self._stats.copy() <NEW_LINE> <DEDENT> finally: <NEW_LINE> <INDENT> self._stats_lock.release() <NEW_LINE> <DEDENT> <DEDENT> def __repr__(self): <NEW_LINE> <INDENT> stats = self._stats_copy() <NEW_LINE> elapsed = int(time.time() - self._start_time) <NEW_LINE> elapsed_str = '' <NEW_LINE> if elapsed < 60: <NEW_LINE> <INDENT> elapsed_str = str(elapsed) + 'sec' <NEW_LINE> <DEDENT> elif 60 <= elapsed < 3600: <NEW_LINE> <INDENT> elapsed_str = '%dmn%dsec' % (elapsed / 60, elapsed % 60) <NEW_LINE> <DEDENT> elif 3600 <= elapsed < 86400: <NEW_LINE> <INDENT> elapsed_str = '%dh%dmn' % (elapsed / 3600, (elapsed % 3600) / 60) <NEW_LINE> <DEDENT> elif elapsed >= 86400: <NEW_LINE> <INDENT> elapsed_str = '%dd%dh' % (elapsed / 86400, (elapsed % 86400) / 3600) <NEW_LINE> <DEDENT> stats['ElapsedTime'] = elapsed_str <NEW_LINE> l = [] <NEW_LINE> for ev, value in sorted(stats.items(), key=lambda x: x[0]): <NEW_LINE> <INDENT> l.append(' %s=%s' % (output_format.field_name(ev), output_format.field_value(value))) <NEW_LINE> <DEDENT> s = '<%s%s >' % (output_format.class_name(self.__class__.__name__), ''.join(l)) <NEW_LINE> return s <NEW_LINE> <DEDENT> def dump(self, filename): <NEW_LINE> <INDENT> flags = os.O_WRONLY|os.O_CREAT|os.O_NOFOLLOW|os.O_EXCL <NEW_LINE> fd = 
os.open(filename, flags, 0o0600) <NEW_LINE> os.write(fd, bytes(self.__str__(), locale.getpreferredencoding())) <NEW_LINE> os.close(fd) <NEW_LINE> <DEDENT> def __str__(self, scale=45): <NEW_LINE> <INDENT> stats = self._stats_copy() <NEW_LINE> if not stats: <NEW_LINE> <INDENT> return '' <NEW_LINE> <DEDENT> m = max(stats.values()) <NEW_LINE> unity = scale / m <NEW_LINE> fmt = '%%-26s%%-%ds%%s' % (len(output_format.field_value('@' * scale)) + 1) <NEW_LINE> def func(x): <NEW_LINE> <INDENT> return fmt % (output_format.field_name(x[0]), output_format.field_value('@' * int(x[1] * unity)), output_format.simple('%d' % x[1], 'yellow')) <NEW_LINE> <DEDENT> s = '\n'.join(map(func, sorted(stats.items(), key=lambda x: x[0]))) <NEW_LINE> return s | Compute and display trivial statistics about processed events. | 6259907c3617ad0b5ee07b87 |
class UpdateFilmSchema(CreateFilmSchema): <NEW_LINE> <INDENT> title = SchemaNode(String(), missing=None) | A schema to validate input parameters intended to UPDATE an existing film. | 6259907c3346ee7daa33837e |
class StylesView(StylesBase, ViewletBase): <NEW_LINE> <INDENT> pass | Styles Viewlet | 6259907cf548e778e596cfcb |
class ServiceHostTests(TestCase): <NEW_LINE> <INDENT> def setUp(self): <NEW_LINE> <INDENT> self.service = service_create('example.com', 'nopass') <NEW_LINE> <DEDENT> def tearDown(self): <NEW_LINE> <INDENT> Service.objects.all().delete() <NEW_LINE> <DEDENT> def get_service(self): <NEW_LINE> <INDENT> return Service.objects.get(username='example.com') <NEW_LINE> <DEDENT> def get_hosts(self): <NEW_LINE> <INDENT> return self.get_service().hosts.values_list('address', flat=True) <NEW_LINE> <DEDENT> def assertCountEqual(self, actual, expected, msg=None): <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> super(ServiceHostTests, self).assertCountEqual( actual, expected, msg) <NEW_LINE> <DEDENT> except AttributeError: <NEW_LINE> <INDENT> self.assertEqual(set(actual), set(expected), msg) <NEW_LINE> self.assertEqual(len(actual), len(expected)) <NEW_LINE> <DEDENT> <DEDENT> def test_add_host(self): <NEW_LINE> <INDENT> hosts = ['127.0.0.1'] <NEW_LINE> self.assertIsNone(self.service.add_hosts(*hosts)) <NEW_LINE> self.assertCountEqual(self.get_hosts(), hosts) <NEW_LINE> <DEDENT> def test_set_hosts(self): <NEW_LINE> <INDENT> hosts = ['127.0.0.1', '::1'] <NEW_LINE> self.assertIsNone(self.service.set_hosts(*hosts)) <NEW_LINE> self.assertCountEqual(self.get_hosts(), hosts) <NEW_LINE> <DEDENT> def test_verify_host(self): <NEW_LINE> <INDENT> hosts = ['127.0.0.1', '::1'] <NEW_LINE> self.assertIsNone(self.service.set_hosts(*hosts)) <NEW_LINE> self.assertTrue(self.service.verify_host('127.0.0.1')) <NEW_LINE> self.assertTrue(self.service.verify_host('::1')) <NEW_LINE> self.assertFalse(self.service.verify_host('127.0.0.2')) <NEW_LINE> self.assertFalse(self.service.verify_host('::2')) <NEW_LINE> <DEDENT> def test_verify(self): <NEW_LINE> <INDENT> hosts = ['127.0.0.1', '::1'] <NEW_LINE> self.assertIsNone(self.service.set_hosts(*hosts)) <NEW_LINE> self.assertTrue(self.service.verify('nopass', '127.0.0.1')) <NEW_LINE> self.assertTrue(self.service.verify('nopass', '::1')) <NEW_LINE> 
self.assertFalse(self.service.verify('wrong', '127.0.0.1')) <NEW_LINE> self.assertFalse(self.service.verify('wrong', '::1')) <NEW_LINE> self.assertFalse(self.service.verify('nopass', '127.0.0.2')) <NEW_LINE> self.assertFalse(self.service.verify('nopass', '::2')) <NEW_LINE> <DEDENT> def test_del_hosts(self): <NEW_LINE> <INDENT> hosts = ['127.0.0.1'] <NEW_LINE> self.service.add_hosts(*hosts) <NEW_LINE> self.assertCountEqual(self.get_hosts(), hosts) <NEW_LINE> self.assertIsNone(self.service.del_hosts(*hosts)) <NEW_LINE> self.assertCountEqual(self.get_service().hosts.all(), []) <NEW_LINE> <DEDENT> def test_del_hosts_gone(self): <NEW_LINE> <INDENT> self.assertCountEqual(self.get_service().hosts.all(), []) <NEW_LINE> self.assertIsNone(self.service.del_hosts(*['127.0.0.1'])) <NEW_LINE> self.assertCountEqual(self.get_service().hosts.all(), []) <NEW_LINE> <DEDENT> def test_create_invalid_host(self): <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> service_create('fs:inf', 'foobar') <NEW_LINE> self.fail() <NEW_LINE> <DEDENT> except ServiceUsernameNotValid: <NEW_LINE> <INDENT> self.assertCountEqual(Service.objects.all(), [self.service]) | Test Service model, more specifically the hosts functionality. This is not
exposed via the API. | 6259907c1f5feb6acb164632 |
class KeyValue(Data): <NEW_LINE> <INDENT> aliases = ['keyvalue'] <NEW_LINE> _settings = { 'storage-type' : 'sqlite3' } <NEW_LINE> def __unicode__(self): <NEW_LINE> <INDENT> return repr(self) <NEW_LINE> <DEDENT> def data(self): <NEW_LINE> <INDENT> raise Exception("No data method for KeyValue type data.") <NEW_LINE> <DEDENT> def storage_class_alias(self, file_ext): <NEW_LINE> <INDENT> if file_ext == '.sqlite3': <NEW_LINE> <INDENT> return 'sqlite3' <NEW_LINE> <DEDENT> elif file_ext == '.json': <NEW_LINE> <INDENT> return 'json' <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> return self.setting('storage-type') <NEW_LINE> <DEDENT> <DEDENT> def value(self, key, throwException=True): <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> return self.storage[key] <NEW_LINE> <DEDENT> except Exception: <NEW_LINE> <INDENT> if throwException: <NEW_LINE> <INDENT> raise <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> def like(self, key): <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> return self.storage.like(key) <NEW_LINE> <DEDENT> except AttributeError: <NEW_LINE> <INDENT> msg = "The `like()` method is not implemented for storage type '%s'" <NEW_LINE> msgargs = self.storage.alias <NEW_LINE> raise dexy.exceptions.UserFeedback(msg % msgargs) <NEW_LINE> <DEDENT> <DEDENT> def __getitem__(self, key): <NEW_LINE> <INDENT> return self.value(key) <NEW_LINE> <DEDENT> def append(self, key, value): <NEW_LINE> <INDENT> self.storage.append(key, value) <NEW_LINE> <DEDENT> def query(self, query): <NEW_LINE> <INDENT> return self.storage.query(query) <NEW_LINE> <DEDENT> def keys(self): <NEW_LINE> <INDENT> return self.storage.keys() <NEW_LINE> <DEDENT> def items(self): <NEW_LINE> <INDENT> return self.storage.items() <NEW_LINE> <DEDENT> def iteritems(self): <NEW_LINE> <INDENT> return self.storage.iteritems() <NEW_LINE> <DEDENT> def save(self): <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> self.storage.persist() <NEW_LINE> <DEDENT> except Exception as e: <NEW_LINE> <INDENT> msg = u"Problem saving '%s': %s" % (self.key, 
unicode(e)) <NEW_LINE> raise dexy.exceptions.InternalDexyProblem(msg) | Data class for key-value data. | 6259907c5fdd1c0f98e5f9b9 |
class PapermillRateLimitException(PapermillException): <NEW_LINE> <INDENT> pass | Raised when an io request has been rate limited | 6259907c56b00c62f0fb430e |
class _HistoryEditor(Editor): <NEW_LINE> <INDENT> def init(self, parent): <NEW_LINE> <INDENT> self.control = control = QtGui.QComboBox() <NEW_LINE> control.setEditable(True) <NEW_LINE> control.setInsertPolicy(QtGui.QComboBox.InsertAtTop) <NEW_LINE> if self.factory.entries > 0: <NEW_LINE> <INDENT> signal = QtCore.SIGNAL('rowsInserted(const QModelIndex&, int, int)') <NEW_LINE> QtCore.QObject.connect(control.model(), signal, self._truncate) <NEW_LINE> <DEDENT> if self.factory.auto_set: <NEW_LINE> <INDENT> signal = QtCore.SIGNAL('editTextChanged(QString)') <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> signal = QtCore.SIGNAL('activated(QString)') <NEW_LINE> <DEDENT> QtCore.QObject.connect(control, signal, self.update_object) <NEW_LINE> self.set_tooltip() <NEW_LINE> <DEDENT> def update_object(self, text): <NEW_LINE> <INDENT> if not self._no_update: <NEW_LINE> <INDENT> self.value = unicode(text) <NEW_LINE> <DEDENT> <DEDENT> def update_editor(self): <NEW_LINE> <INDENT> self._no_update = True <NEW_LINE> self.control.setEditText(self.str_value) <NEW_LINE> self._no_update = False <NEW_LINE> <DEDENT> def restore_prefs(self, prefs): <NEW_LINE> <INDENT> history = prefs.get('history') <NEW_LINE> if history: <NEW_LINE> <INDENT> self._no_update = True <NEW_LINE> self.control.addItems(history[:self.factory.entries]) <NEW_LINE> self.control.setEditText(self.str_value) <NEW_LINE> self._no_update = False <NEW_LINE> <DEDENT> <DEDENT> def save_prefs(self): <NEW_LINE> <INDENT> history = [ str(self.control.itemText(index)) for index in xrange(self.control.count()) ] <NEW_LINE> if self.ui.result: <NEW_LINE> <INDENT> current = str(self.control.currentText()) <NEW_LINE> if current != self.str_value: <NEW_LINE> <INDENT> history.insert(0, current) <NEW_LINE> <DEDENT> <DEDENT> return { 'history': history } <NEW_LINE> <DEDENT> def _truncate(self, parent, start, end): <NEW_LINE> <INDENT> diff = self.control.count() - self.factory.entries <NEW_LINE> if diff > 0: <NEW_LINE> <INDENT> for i in 
xrange(diff): <NEW_LINE> <INDENT> self.control.removeItem(self.factory.entries) | Simple style text editor, which displays a text field and maintains a
history of previously entered values, the maximum number of which is
specified by the 'entries' trait of the HistoryEditor factory. | 6259907c23849d37ff852af3 |
class Orcid(ArticleScraper): <NEW_LINE> <INDENT> aliases = ['orcid'] <NEW_LINE> _settings = { 'orcid' : ("ORCID of author to process, or a list of ORCIDS.", None), "period" : ("Custom setting for article 'period'.", "manual"), 'orcid-data-file' : ("File to save data under.", "orcid.pickle") } <NEW_LINE> def scrape(self): <NEW_LINE> <INDENT> if not self.setting('orcid'): <NEW_LINE> <INDENT> raise Exception("Must provide an ORCID.") <NEW_LINE> <DEDENT> if isinstance(self.setting('orcid'), basestring): <NEW_LINE> <INDENT> orcids = [self.setting('orcid')] <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> orcids = self.setting('orcid') <NEW_LINE> <DEDENT> responses = [orcid.get(orcd) for orcd in orcids] <NEW_LINE> orcid_filepath = os.path.join(self.work_dir(), self.setting('orcid-data-file')) <NEW_LINE> with open(orcid_filepath, 'wb') as f: <NEW_LINE> <INDENT> pickle.dump(responses, f) <NEW_LINE> <DEDENT> <DEDENT> def process(self): <NEW_LINE> <INDENT> orcid_filepath = os.path.join(self.cache_dir(), self.setting('orcid-data-file')) <NEW_LINE> with open(orcid_filepath, 'rb') as f: <NEW_LINE> <INDENT> responses = pickle.load(f) <NEW_LINE> <DEDENT> for response in responses: <NEW_LINE> <INDENT> args = (response.orcid, response.given_name, response.family_name) <NEW_LINE> list_name = "ORCID %s Author: %s %s" % args <NEW_LINE> article_list = ArticleList.create(name = list_name, orcid = response.orcid, source = self.alias) <NEW_LINE> for pub in response.publications: <NEW_LINE> <INDENT> doi = None <NEW_LINE> if pub.external_ids: <NEW_LINE> <INDENT> for ext_id in pub.external_ids: <NEW_LINE> <INDENT> if ext_id.type == "DOI": <NEW_LINE> <INDENT> doi = ext_id.id <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> article = Article.create_or_update_by_doi({ 'source' : self.alias, 'doi' : doi, "period" : self.setting('period'), 'url' : pub.url, 'title' : pub.title }) <NEW_LINE> article_list.add_article(article) <NEW_LINE> <DEDENT> return article_list | Generate lists of articles for authors based 
on ORCID. | 6259907c3317a56b869bf263 |
class Comment(models.Model): <NEW_LINE> <INDENT> nid = models.AutoField(primary_key=True) <NEW_LINE> user = models.ForeignKey(to='UserInfo', to_field='nid') <NEW_LINE> article = models.ForeignKey(to='Article', to_field='nid') <NEW_LINE> content = models.CharField(max_length=256) <NEW_LINE> create_time = models.DateTimeField(auto_now_add=True) <NEW_LINE> parent_comment = models.ForeignKey('self', null=True) <NEW_LINE> def __str__(self): <NEW_LINE> <INDENT> return self.content | 评论表 | 6259907c4a966d76dd5f0921 |
class TechIndicator(models.Model): <NEW_LINE> <INDENT> standard = models.CharField('standard', max_length=20) <NEW_LINE> description = models.TextField() <NEW_LINE> tech_standards = models.ManyToManyField("TechnologyStandard") <NEW_LINE> def __unicode__(self): <NEW_LINE> <INDENT> return self.standard <NEW_LINE> <DEDENT> class Meta: <NEW_LINE> <INDENT> verbose_name = "Kentucky's Teacher Technology Standard" <NEW_LINE> verbose_name_plural = "Kentucky's Teacher Technology Standards" <NEW_LINE> ordering = ['id',] | Tech Indicator class. | 6259907c7cff6e4e811b747b |
class Blog(models.Model): <NEW_LINE> <INDENT> user_id=models.ForeignKey(BloggerLogin, on_delete=models.CASCADE) <NEW_LINE> com_id=models.ForeignKey(Company, on_delete=models.CASCADE,default=-1) <NEW_LINE> cat_id=models.ForeignKey(Category, on_delete=models.CASCADE) <NEW_LINE> subcat_id=models.ForeignKey(SubCategory, on_delete=models.CASCADE) <NEW_LINE> blog_title=models.CharField(max_length=300,null=False) <NEW_LINE> blog_desc=models.TextField() <NEW_LINE> blog_image=models.ImageField(upload_to='uploads/blog_image/', default='blog/images/already.png') <NEW_LINE> blog_banner=models.ImageField(upload_to='uploads/blog_banner/', default='blog/images/already.png') <NEW_LINE> blog_video=models.CharField(max_length=400) <NEW_LINE> active = models.IntegerField(default=1) <NEW_LINE> created_timestamp = models.DateTimeField(auto_now_add=True) <NEW_LINE> updated_timestamp = models.DateTimeField(auto_now=True) | This is for the writing of the blog. | 6259907cbe7bc26dc9252b73 |
class TrafficAnalyticsProperties(msrest.serialization.Model): <NEW_LINE> <INDENT> _attribute_map = { 'network_watcher_flow_analytics_configuration': {'key': 'networkWatcherFlowAnalyticsConfiguration', 'type': 'TrafficAnalyticsConfigurationProperties'}, } <NEW_LINE> def __init__( self, **kwargs ): <NEW_LINE> <INDENT> super(TrafficAnalyticsProperties, self).__init__(**kwargs) <NEW_LINE> self.network_watcher_flow_analytics_configuration = kwargs.get('network_watcher_flow_analytics_configuration', None) | Parameters that define the configuration of traffic analytics.
:param network_watcher_flow_analytics_configuration: Parameters that define the configuration
of traffic analytics.
:type network_watcher_flow_analytics_configuration:
~azure.mgmt.network.v2020_04_01.models.TrafficAnalyticsConfigurationProperties | 6259907c4f6381625f19a1cb |
class Monoid: <NEW_LINE> <INDENT> def __init__(self, one: T, elements: set[T], function: Callable[[T, T], T]): <NEW_LINE> <INDENT> assert one in elements <NEW_LINE> self.one = one <NEW_LINE> self.elements = elements <NEW_LINE> self.function = function <NEW_LINE> <DEDENT> @staticmethod <NEW_LINE> def monoid_from_generators(generators: set[T], equivalence_relation: Mapping[[T], T]) -> Monoid: <NEW_LINE> <INDENT> raise NotImplementedError <NEW_LINE> <DEDENT> def submonoid_from_equivalence_relation(self, equivalence_relation: Mapping[[T], T]) -> Monoid: <NEW_LINE> <INDENT> raise NotImplementedError <NEW_LINE> <DEDENT> def submonoids(self): <NEW_LINE> <INDENT> raise NotImplementedError | We use the multiplicative notation, e.g: 1, S, f where:
for all a,b in S:
f(1,a)=f(a,1)=a
f(a,b) in S | 6259907c55399d3f05627f4f |
class TiledGrid3D(GridBase): <NEW_LINE> <INDENT> def _init(self): <NEW_LINE> <INDENT> ver_pts, tex_pts = self._calculate_vertex_points() <NEW_LINE> self.vertex_list = pyglet.graphics.vertex_list(self.grid.x * self.grid.y * 4, "t2f", "v3f/stream", "c4B") <NEW_LINE> self.vertex_points = ver_pts[:] <NEW_LINE> self.vertex_list.vertices = ver_pts <NEW_LINE> self.vertex_list.tex_coords = tex_pts <NEW_LINE> self.vertex_list.colors = (255, 255, 255, 255) * self.grid.x * self.grid.y * 4 <NEW_LINE> <DEDENT> def _blit(self): <NEW_LINE> <INDENT> self.vertex_list.draw(pyglet.gl.GL_QUADS) <NEW_LINE> <DEDENT> def _calculate_vertex_points(self): <NEW_LINE> <INDENT> w = float(self.texture.width) <NEW_LINE> h = float(self.texture.height) <NEW_LINE> vertex_points = [] <NEW_LINE> texture_points = [] <NEW_LINE> for x in range(0, self.grid.x): <NEW_LINE> <INDENT> for y in range(0, self.grid.y): <NEW_LINE> <INDENT> x1 = x * self.x_step <NEW_LINE> x2 = x1 + self.x_step <NEW_LINE> y1 = y * self.y_step <NEW_LINE> y2 = y1 + self.y_step <NEW_LINE> vertex_points += [x1, y1, 0, x2, y1, 0, x2, y2, 0, x1, y2, 0] <NEW_LINE> texture_points += [x1 / w, y1 / h, x2 / w, y1 / h, x2 / w, y2 / h, x1 / w, y2 / h] <NEW_LINE> <DEDENT> <DEDENT> return vertex_points, texture_points <NEW_LINE> <DEDENT> def set_tile(self, x, y, coords): <NEW_LINE> <INDENT> idx = (self.grid.y * x + y) * 4 * 3 <NEW_LINE> self.vertex_list.vertices[idx:idx + 12] = coords <NEW_LINE> <DEDENT> def get_original_tile(self, x, y): <NEW_LINE> <INDENT> idx = (self.grid.y * x + y) * 4 * 3 <NEW_LINE> return self.vertex_points[idx:idx + 12] <NEW_LINE> <DEDENT> def get_tile(self, x, y): <NEW_LINE> <INDENT> idx = (self.grid.y * x + y) * 4 * 3 <NEW_LINE> return self.vertex_list.vertices[idx:idx + 12] | `TiledGrid3D` is a 3D grid implementation. It differs from `Grid3D` in that
the tiles can be separated from the grid.
The vertex array will be built with::
self.vertex_list.vertices: x,y,z (floats)
self.vertex_list.tex_coords: x,y (floats)
self.vertex_list.colors: RGBA, with values from 0 - 255 | 6259907c5fcc89381b266e79 |
class Linear(Kern): <NEW_LINE> <INDENT> def __init__(self, input_dim, variance=1.0, active_dims=None, ARD=False, name=None): <NEW_LINE> <INDENT> Kern.__init__(self, input_dim, active_dims, name=name) <NEW_LINE> self.ARD = ARD <NEW_LINE> if ARD: <NEW_LINE> <INDENT> variance = np.ones(self.input_dim) * variance <NEW_LINE> self.variance = Parameter(variance, transform=transforms.positive) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> self.variance = Parameter(variance, transform=transforms.positive) <NEW_LINE> <DEDENT> <DEDENT> def K(self, X, X2=None, presliced=False): <NEW_LINE> <INDENT> if not presliced: <NEW_LINE> <INDENT> X, X2 = self._slice(X, X2) <NEW_LINE> <DEDENT> if X2 is None: <NEW_LINE> <INDENT> return tf.matmul(X * self.variance, X, transpose_b=True) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> return tf.matmul(X * self.variance, X2, transpose_b=True) <NEW_LINE> <DEDENT> <DEDENT> def Kdiag(self, X, presliced=False): <NEW_LINE> <INDENT> if not presliced: <NEW_LINE> <INDENT> X, _ = self._slice(X, None) <NEW_LINE> <DEDENT> return tf.reduce_sum(tf.square(X) * self.variance, 1) | The linear kernel | 6259907c442bda511e95da75 |
class Globalarrays(CMakePackage): <NEW_LINE> <INDENT> homepage = "http://hpc.pnl.gov/globalarrays/" <NEW_LINE> url = "https://github.com/GlobalArrays/ga" <NEW_LINE> version('master', git='https://github.com/GlobalArrays/ga', branch='master') <NEW_LINE> depends_on('blas') <NEW_LINE> depends_on('lapack') <NEW_LINE> depends_on('mpi') <NEW_LINE> patch('ibm-xl.patch', when='%xl') <NEW_LINE> patch('ibm-xl.patch', when='%xl_r') <NEW_LINE> def cmake_args(self): <NEW_LINE> <INDENT> options = [] <NEW_LINE> options.extend([ '-DENABLE_FORTRAN=ON', '-DENABLE_BLAS=ON', ]) <NEW_LINE> if self.compiler.name == 'xl' or self.compiler.name == 'xl_r': <NEW_LINE> <INDENT> options.extend([ '-DCMAKE_Fortran_COMPILER=%s' % self.compiler.f77, '-DCMAKE_Fortran_FLAGS=-qzerosize' ]) <NEW_LINE> <DEDENT> return options | The Global Arrays (GA) toolkit provides a shared memory style programming
environment in the context of distributed array data structures. | 6259907c3617ad0b5ee07b89 |
class HTTPUploader(threading.Thread): <NEW_LINE> <INDENT> def __init__(self, i, request, start, size, timeout, opener=None, shutdown_event=None): <NEW_LINE> <INDENT> threading.Thread.__init__(self) <NEW_LINE> self.request = request <NEW_LINE> self.request.data.start = self.starttime = start <NEW_LINE> self.size = size <NEW_LINE> self.result = 0 <NEW_LINE> self.timeout = timeout <NEW_LINE> self.i = i <NEW_LINE> if opener: <NEW_LINE> <INDENT> self._opener = opener.open <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> self._opener = urlopen <NEW_LINE> <DEDENT> if shutdown_event: <NEW_LINE> <INDENT> self._shutdown_event = shutdown_event <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> self._shutdown_event = FakeShutdownEvent() <NEW_LINE> <DEDENT> <DEDENT> def run(self): <NEW_LINE> <INDENT> request = self.request <NEW_LINE> try: <NEW_LINE> <INDENT> if ((timeit.default_timer() - self.starttime) <= self.timeout and not event_is_set(self._shutdown_event)): <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> f = self._opener(request) <NEW_LINE> <DEDENT> except TypeError: <NEW_LINE> <INDENT> request = build_request(self.request.get_full_url(), data=request.data.read(self.size)) <NEW_LINE> f = self._opener(request) <NEW_LINE> <DEDENT> f.read(11) <NEW_LINE> f.close() <NEW_LINE> self.result = sum(self.request.data.total) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> self.result = 0 <NEW_LINE> <DEDENT> <DEDENT> except (IOError, SpeedtestUploadTimeout): <NEW_LINE> <INDENT> self.result = sum(self.request.data.total) <NEW_LINE> <DEDENT> except HTTP_ERRORS: <NEW_LINE> <INDENT> self.result = 0 | Thread class for putting a URL | 6259907c3346ee7daa33837f |
class BooleanMetric(Metric): <NEW_LINE> <INDENT> def _populate_value(self, metric, value, start_time): <NEW_LINE> <INDENT> metric.boolean_value = value <NEW_LINE> <DEDENT> def set(self, value, fields=None, target_fields=None): <NEW_LINE> <INDENT> if not isinstance(value, bool): <NEW_LINE> <INDENT> raise errors.MonitoringInvalidValueTypeError(self._name, value) <NEW_LINE> <DEDENT> self._set(fields, target_fields, value) <NEW_LINE> <DEDENT> def is_cumulative(self): <NEW_LINE> <INDENT> return False | A metric whose value type is a boolean. | 6259907c5fdd1c0f98e5f9bb |
class InvalidControlType(ProtoError): <NEW_LINE> <INDENT> pass | Invalid Control Type.
Invalid control type provided for Fast Operate, must be member of:
['REMOTE_BIT', 'BREAKER_BIT']. | 6259907cdc8b845886d54ff7 |
@TwoCompatibleThree <NEW_LINE> @classbuilder( bases=( DeepClass("_aor_", { "username": {dck.check: lambda x: isinstance(x, bytes)}, "host": { dck.descriptor: ParsedPropertyOfClass(Host), dck.gen: Host}}), Parser, TupleRepresentable, ValueBinder ) ) <NEW_LINE> class AOR: <NEW_LINE> <INDENT> parseinfo = { Parser.Pattern: b"(?:(%(user)s|%(telephone_subscriber)s)(?::%(password)s)?@)?" b"(%(hostport)s)" % abnf_name_bdict, Parser.Mappings: [("username",), ("host", Host)], Parser.PassMappingsToInit: True, } <NEW_LINE> vb_dependencies = [ ["host", ["address", "port"]]] <NEW_LINE> @classmethod <NEW_LINE> def ExtractAOR(cls, target): <NEW_LINE> <INDENT> if hasattr(target, "aor"): <NEW_LINE> <INDENT> return target.aor <NEW_LINE> <DEDENT> if isinstance(target, AOR): <NEW_LINE> <INDENT> return target <NEW_LINE> <DEDENT> if isinstance(target, bytes): <NEW_LINE> <INDENT> return cls.Parse(target) <NEW_LINE> <DEDENT> raise TypeError( "%r instance cannot be derived from %r instance." % ( AOR.__class__.__name__, target.__class__.__name__)) <NEW_LINE> <DEDENT> host = ParsedProperty("_aor_host", Host) <NEW_LINE> def __init__(self, username=None, host=None, **kwargs): <NEW_LINE> <INDENT> super(AOR, self).__init__(username=username, host=host, **kwargs) <NEW_LINE> <DEDENT> def __bytes__(self): <NEW_LINE> <INDENT> host = self.host <NEW_LINE> if host is None: <NEW_LINE> <INDENT> raise Incomplete("AOR %r does not have a host" % self) <NEW_LINE> <DEDENT> host_bytes = bytes(host) <NEW_LINE> if len(host_bytes) == 0: <NEW_LINE> <INDENT> raise Incomplete("AOR %r has a zero-length host" % self) <NEW_LINE> <DEDENT> uname = self.username <NEW_LINE> if uname: <NEW_LINE> <INDENT> return b"%s@%s" % (uname, host_bytes) <NEW_LINE> <DEDENT> return host_bytes <NEW_LINE> <DEDENT> def tupleRepr(self): <NEW_LINE> <INDENT> return (self.username, self.host) | A AOR object. | 6259907c7d43ff2487428133 |
class ViewPollResults(DetailView): <NEW_LINE> <INDENT> context_object_name = 'poll' <NEW_LINE> template_name = 'voting/poll_results.html' <NEW_LINE> def dispatch(self, request, *args, **kwargs): <NEW_LINE> <INDENT> space = get_object_or_404(Space, url=kwargs['space_url']) <NEW_LINE> if request.user.has_perm('view_space', space): <NEW_LINE> <INDENT> return super(ViewPollResults, self).dispatch(request, *args, **kwargs) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> raise PermissionDenied <NEW_LINE> <DEDENT> <DEDENT> def get_object(self): <NEW_LINE> <INDENT> self.poll = get_object_or_404(Poll, pk=self.kwargs['pk']) <NEW_LINE> return self.poll <NEW_LINE> <DEDENT> def get_context_data(self, **kwargs): <NEW_LINE> <INDENT> context = super(ViewPollResults, self).get_context_data(**kwargs) <NEW_LINE> space = get_object_or_404(Space, url=self.kwargs['space_url']) <NEW_LINE> total_votes = Choice.objects.filter(poll=self.poll) <NEW_LINE> v = 0 <NEW_LINE> for vote in total_votes: <NEW_LINE> <INDENT> v += vote.votes.count() <NEW_LINE> <DEDENT> context['get_place'] = space <NEW_LINE> context['votes_total'] = v <NEW_LINE> return context | Displays an specific poll results. The results are always available even
after the end_date.
.. versionadded:: 0.1.7 beta
:context: get_place | 6259907cf548e778e596cfce |
class AcqSvProfile(object):
    """Acquisition profile for a specific SV, for debugging purposes.

    Describes the SV profile during acquisition time; used to debug and
    measure performance.  Fields (all parsed from / built to the binary
    wire format): job_type, status (1 success / 0 failure), cn0, int_time,
    sid (GnssSignal), bin_width, timestamp, time_spent, cf_min, cf_max,
    cf and cp.  cn0/cf/cp are only valid when status is 1.
    """
    _parser = Embedded(Struct("AcqSvProfile",
                              ULInt8('job_type'),
                              ULInt8('status'),
                              ULInt16('cn0'),
                              ULInt8('int_time'),
                              Struct('sid', GnssSignal._parser),
                              ULInt16('bin_width'),
                              ULInt32('timestamp'),
                              ULInt32('time_spent'),
                              SLInt32('cf_min'),
                              SLInt32('cf_max'),
                              SLInt32('cf'),
                              ULInt32('cp'),))
    __slots__ = [
        'job_type',
        'status',
        'cn0',
        'int_time',
        'sid',
        'bin_width',
        'timestamp',
        'time_spent',
        'cf_min',
        'cf_max',
        'cf',
        'cp',
    ]

    def __init__(self, payload=None, **kwargs):
        # Either deserialize from a binary payload, or take every field
        # explicitly as a keyword argument (KeyError if one is missing).
        if payload:
            self.from_binary(payload)
        else:
            self.job_type = kwargs.pop('job_type')
            self.status = kwargs.pop('status')
            self.cn0 = kwargs.pop('cn0')
            self.int_time = kwargs.pop('int_time')
            self.sid = kwargs.pop('sid')
            self.bin_width = kwargs.pop('bin_width')
            self.timestamp = kwargs.pop('timestamp')
            self.time_spent = kwargs.pop('time_spent')
            self.cf_min = kwargs.pop('cf_min')
            self.cf_max = kwargs.pop('cf_max')
            self.cf = kwargs.pop('cf')
            self.cp = kwargs.pop('cp')

    def __repr__(self):
        return fmt_repr(self)

    def from_binary(self, d):
        """Parse binary data ``d`` and copy the parsed fields onto self."""
        p = AcqSvProfile._parser.parse(d)
        for n in self.__class__.__slots__:
            setattr(self, n, getattr(p, n))

    def to_binary(self):
        """Serialize this profile back to its binary wire format.

        Bug fix: the original read ``getattr(obj, k)`` where ``obj`` was
        undefined (guaranteed NameError) and called ``AcqSvProfile.build``,
        which is not defined on this class; build from ``self`` through the
        construct parser instead.
        """
        d = dict([(k, getattr(self, k)) for k in self.__slots__])
        return AcqSvProfile._parser.build(d)
class LatestProblemResponseTaskMapLegacyKeysTest(InitializeLegacyKeysMixin, LatestProblemResponseTaskMapTest):
    """Re-run the LatestProblemResponseTask map tests with legacy keys.

    All test cases are inherited from LatestProblemResponseTaskMapTest; the
    InitializeLegacyKeysMixin only changes the key setup used by those tests.
    """
    pass
@tf.keras.utils.register_keras_serializable(package='Text')
class BertPretrainer(tf.keras.Model):
    """BERT network training model.

    This is an implementation of the network structure surrounding a
    transformer encoder as described in "BERT: Pre-training of Deep
    Bidirectional Transformers for Language Understanding"
    (https://arxiv.org/abs/1810.04805).

    The BertPretrainer allows a user to pass in a transformer stack, and
    instantiates the masked language model and classification networks that
    are used to create the training objectives.

    Arguments:
      network: A transformer network. This network should output a sequence
        output and a classification output. Furthermore, it should expose its
        embedding table via a "get_embedding_table" method.
      num_classes: Number of classes to predict from the classification
        network.
      num_token_predictions: Number of tokens to predict from the masked LM.
      activation: The activation (if any) to use in the masked LM and
        classification networks. If None, no activation will be used.
      initializer: The initializer (if any) to use in the masked LM and
        classification networks. Defaults to a Glorot uniform initializer.
      output: The output style for this network. Can be either 'logits' or
        'predictions'.
    """

    def __init__(self,
                 network,
                 num_classes,
                 num_token_predictions,
                 activation=None,
                 initializer='glorot_uniform',
                 output='logits',
                 **kwargs):
        # Disable Keras attribute tracking while sublayers are wired manually.
        self._self_setattr_tracking = False
        # Saved verbatim so get_config()/from_config() can round-trip.
        self._config = {
            'network': network,
            'num_classes': num_classes,
            'num_token_predictions': num_token_predictions,
            'activation': activation,
            'initializer': initializer,
            'output': output,
        }
        network_inputs = network.inputs
        inputs = copy.copy(network_inputs)
        # Run the encoder symbolically to obtain its two outputs.
        sequence_output, cls_output = network(network_inputs)
        # Axis 1 is assumed to be the sequence-length axis — TODO confirm.
        sequence_output_length = sequence_output.shape.as_list()[1]
        if sequence_output_length < num_token_predictions:
            raise ValueError(
                "The passed network's output length is %s, which is less than the "
                'requested num_token_predictions %s.' %
                (sequence_output_length, num_token_predictions))
        # Extra model input: positions of the masked tokens to predict.
        masked_lm_positions = tf.keras.layers.Input(
            shape=(num_token_predictions,),
            name='masked_lm_positions',
            dtype=tf.int32)
        inputs.append(masked_lm_positions)
        self.masked_lm = networks.MaskedLM(
            num_predictions=num_token_predictions,
            input_width=sequence_output.shape[-1],
            source_network=network,
            activation=activation,
            initializer=initializer,
            output=output,
            name='masked_lm')
        lm_outputs = self.masked_lm([sequence_output, masked_lm_positions])
        self.classification = networks.Classification(
            input_width=cls_output.shape[-1],
            num_classes=num_classes,
            initializer=initializer,
            output=output,
            name='classification')
        sentence_outputs = self.classification(cls_output)
        super(BertPretrainer, self).__init__(
            inputs=inputs,
            outputs=[lm_outputs, sentence_outputs],
            **kwargs)

    def get_config(self):
        """Return the constructor kwargs used to build this model."""
        return self._config

    @classmethod
    def from_config(cls, config, custom_objects=None):
        """Rebuild a BertPretrainer from get_config() output."""
        return cls(**config)
class TargetNotFoundError(Exception):
    """Internal-use exception signalling that a selection operation produced
    no valid pixel (e.g. see ``FloofillOperation._spiral()``)."""
class GesturePerformer:
    """Mixin that lets an object act out human gestures.

    Each gesture is a callable that receives the performer instance.
    """

    def perform(self, *gestures):
        """Invoke every gesture in order, passing this performer to each."""
        for act in gestures:
            act(self)
class PAResponseUsedNFVIPops(Model):
    """NOTE: This class is auto generated by the swagger code generator
    program.  Do not edit the class manually."""

    def __init__(self, nfvi_po_pid=None, mapped_vn_fs=None):
        """PAResponseUsedNFVIPops - a model defined in Swagger.

        :param nfvi_po_pid: identifier of the NFVI PoP used by the placement.
        :param mapped_vn_fs: names of the VNFs mapped onto that PoP.
        """
        # swagger_types / attribute_map drive (de)serialization in `util`;
        # attribute_map gives the wire-format (JSON) key for each attribute.
        self.swagger_types = {
            'nfvi_po_pid': str,
            'mapped_vn_fs': List[str]
        }
        self.attribute_map = {
            'nfvi_po_pid': 'NFVIPoPID',
            'mapped_vn_fs': 'mappedVNFs'
        }
        self._nfvi_po_pid = nfvi_po_pid
        self._mapped_vn_fs = mapped_vn_fs

    @classmethod
    def from_dict(cls, dikt):
        """Build an instance from a dict (swagger deserialization)."""
        return util.deserialize_model(dikt, cls)

    @property
    def nfvi_po_pid(self):
        """NFVI PoP identifier (serialized as 'NFVIPoPID')."""
        return self._nfvi_po_pid

    @nfvi_po_pid.setter
    def nfvi_po_pid(self, nfvi_po_pid):
        self._nfvi_po_pid = nfvi_po_pid

    @property
    def mapped_vn_fs(self):
        """List of mapped VNF names (serialized as 'mappedVNFs')."""
        return self._mapped_vn_fs

    @mapped_vn_fs.setter
    def mapped_vn_fs(self, mapped_vn_fs):
        self._mapped_vn_fs = mapped_vn_fs
class GameState():
    """GameState maintains the API to be used by an AutoPlayer to communicate
    with the game.  It wraps a game model and exposes read access, movement
    commands and cloning."""

    def __init__(self, model):
        # Wrapped game model; double-underscore name-mangles to
        # _GameState__model to discourage direct access.
        self.__model = model
        self.__is_a_clone = False

    def get_falling_block_position(self):
        """Return the falling block's position as reported by the model."""
        return self.__model.falling_block_position

    def get_falling_block_angle(self):
        """Return the falling block's rotation angle."""
        return self.__model.falling_block_angle

    def get_falling_block_tiles(self):
        """Return a copy of the falling block's tile grid."""
        tilescopy = self.__model.get_falling_block_tiles()
        return tilescopy

    def get_next_block_tiles(self):
        """Return a copy of the next block's tile grid."""
        tilescopy = self.__model.get_next_block_tiles()
        return tilescopy

    def get_falling_block_type(self):
        """Return the falling block's type identifier."""
        return self.__model.falling_block_type

    def get_next_block_type(self):
        """Return the next block's type identifier."""
        return self.__model.next_block_type

    @staticmethod
    def _tiles_to_text(tiles, rows, cols):
        """Render a tile grid as text: '#' for occupied cells, '.' for empty,
        one row per line.  Shared by print_block_tiles and print_tiles."""
        txt = ""
        for _y in range(0, rows):
            for _x in range(0, cols):
                txt += '#' if tiles[_y][_x] != 0 else '.'
            txt += '\n'
        return txt

    def print_block_tiles(self):
        """Print the falling block's (square) tile grid to stdout."""
        tiles = self.get_falling_block_tiles()
        size = len(tiles)
        print(self._tiles_to_text(tiles, size, size))

    def get_tiles(self):
        """Return a copy of the whole board's tile grid."""
        tilescopy = self.__model.get_copy_of_tiles()
        return tilescopy

    def print_tiles(self):
        """Print the whole board (MAXROW x MAXCOL) to stdout."""
        print(self._tiles_to_text(self.get_tiles(), MAXROW, MAXCOL))

    def get_score(self):
        """Return the current score."""
        return self.__model.score

    def clone(self, is_dummy):
        """Return a new GameState backed by a clone of the current model."""
        game = GameState(self.__model)
        game._set_model(self.__model.clone(is_dummy), True)
        return game

    def _set_model(self, model, is_a_clone):
        # Internal: swap the wrapped model (used by clone()).
        self.__model = model
        self.__is_a_clone = is_a_clone

    def move(self, direction):
        """Move the falling block in the given direction."""
        self.__model.move(direction)

    def rotate(self, direction):
        """Rotate the falling block in the given direction."""
        self.__model.rotate(direction)

    def update(self):
        """Advance a dummy (cloned) model one step.

        Returns True if the falling block landed; always False on a
        non-dummy model, which is advanced by the real game loop instead.
        """
        if self.__model.is_dummy:
            (_, landed) = self.__model.update()
            return landed
        return False
class Float(Typed):
    """Typed value constrained to Python ``float``."""
    # Concrete type checked/enforced by the Typed base class.
    _type = float
class MalletClassifier(mallet.Mallet):
    """Train a MALLET NaiveBayes classifier over the prepared instances."""

    def _basic_params(self):
        # Baseline configuration: no dry run, fixed task name, no DfR input.
        self.dry_run = False
        self.name = 'mallet_train-classifier'
        self.dfr = False

    def process(self):
        """Build MALLET instances and train the classifier, logging timing.

        Invokes MALLET's Vectors2Classify tool as a subprocess, writing the
        trained classifier to <mallet_out_dir>/trained.classifier and the
        tool's output to the progress file, then reports the elapsed time
        via write_html.
        """
        self._setup_mallet_instances(sequence=False)
        self.mallet_output = os.path.join(self.mallet_out_dir, 'trained.classifier')
        process_args = self.mallet + [
            'cc.mallet.classify.tui.Vectors2Classify',
            '--input', self.instance_file,
            '--output-classifier', self.mallet_output,
            '--trainer', 'NaiveBayes',
            '--noOverwriteProgressMessages', 'true',
        ]
        logging.info('begin training classifier')
        start_time = time.time()
        # Honour dry-run mode: skip the actual subprocess invocation.
        if not self.dry_run:
            # NOTE(review): return code is captured but never checked.
            classifier_return = subprocess.call(process_args,
                                                stdout=self.progress_file,
                                                stderr=self.progress_file)
        finished = 'Classifier trained in ' + str(time.time() - start_time) + ' seconds'
        logging.info(finished)
        params = {'DONE': finished}
        self.write_html(params)
class NewsletterRouter(object):
    """Database router that pins every model of the ``newsletter`` app to the
    dedicated 'newsletter' database and keeps other apps out of it."""

    @staticmethod
    def _is_newsletter(model):
        # True when the model (or object) belongs to the newsletter app.
        return model._meta.app_label == 'newsletter'

    def db_for_read(self, model, **hints):
        """Read newsletter models from the 'newsletter' database."""
        return 'newsletter' if self._is_newsletter(model) else None

    def db_for_write(self, model, **hints):
        """Write newsletter models to the 'newsletter' database."""
        return 'newsletter' if self._is_newsletter(model) else None

    def allow_relation(self, obj1, obj2, **hints):
        """Allow relations when either object is a newsletter model."""
        if self._is_newsletter(obj1) or self._is_newsletter(obj2):
            return True
        return None

    def allow_migrate(self, db, model):
        """Only newsletter models migrate on 'newsletter'; forbid them on any
        other database; express no opinion otherwise."""
        if db == 'newsletter':
            return self._is_newsletter(model)
        if self._is_newsletter(model):
            return False
        return None
class DiskFreeMonitor(Monitor):
    """Show the disk usage and warn when free space nears the limit defined
    by rabbitmq (#326, #337)."""
    type = 'diskfree'
    verbose_name = 'Disk free space'
    average_fields = ['total', 'used', 'free', 'use_per_cent']
    # Shell pipeline: `df -k /` -> last line -> strip '%' -> awk emits a JSON
    # object with total/used/free (KiB) and use_per_cent fields.
    cmd = (
        'DATA=$(df -k / | tail -n 1 | sed "s/%//"); '
        'echo $DATA | awk {\'print "{'
        ' \\"total\\": "$2",'
        ' \\"used\\": "$3",'
        ' \\"free\\": "$4",'
        ' \\"use_per_cent\\": "$5"'
        '}"\'}'
    )

    def execute(self):
        """Run ``cmd``, parse its JSON output and flag low free space.

        Returns a (value, problems) pair where ``value`` is the parsed df
        data and ``problems`` lists warning strings (empty when healthy).
        """
        problems = []
        value = json.loads(run(self.cmd).stdout)
        # Warn before rabbitmq's own free-space alarm would trigger.
        limit = MONITOR_DISK_FREE_LIMIT * MONITOR_DISK_WARN_RATIO
        if value['free'] < limit:
            # Bug fix: corrected "Rabbitiqm" typo to "RabbitMQ" in the
            # warning message.
            msg = ('Low disk space left (%i), potential RabbitMQ alarm that '
                   'will block the producer! (more info on issue report #326)')
            problems.append(msg % value['free'])
        return value, problems
class UnitParseError(UnitError):
    """Unit parsing error carrying the offending expression, a message and an
    optional line number."""

    def __init__(self, expression, message, lineno=None):
        self.expression = expression
        self.message = message
        self.lineno = lineno

    def __str__(self):
        # Include the line number only when one was provided (truthy).
        if self.lineno:
            return "Unit error for `%s' at line %d: %s" % (
                self.expression, self.lineno, self.message)
        return "Unit error for `%s': %s" % (self.expression, self.message)
class TestObject(object):
    """Stand-in for Aimsun's GKObject class.

    Stores a name and a type name and exposes them through the same
    getter methods GKObject provides.
    """

    def __init__(self, name, typename):
        self.var_name = name
        self.var_typename = typename

    def getName(self):
        """Return the object's name."""
        return self.var_name

    def getTypeName(self):
        """Return the object's type name."""
        return self.var_typename
Subsets and Splits
No saved queries yet
Save your SQL queries to embed, download, and access them later. Queries will appear here once saved.