function: string (lengths 11 to 56k)
repo_name: string (lengths 5 to 60)
features: sequence
def disconnect(self):
    try:
        self.__socket.close()
    except socket.error, msg:
        self.__socket = None
        return 0
    return 1
pwnieexpress/pwn_plug_sources
[ 124, 97, 124, 24, 1321552607 ]
def recv(self):
    buffer, self.__recv_addr = self.__socket.recvfrom(8192)
    return buffer
pwnieexpress/pwn_plug_sources
[ 124, 97, 124, 24, 1321552607 ]
def get_socket(self):
    return self.__socket
pwnieexpress/pwn_plug_sources
[ 124, 97, 124, 24, 1321552607 ]
def __init__(self, dstip, dstport=135):
    DCERPCTransport.__init__(self, dstip, dstport)
    self.__socket = 0
pwnieexpress/pwn_plug_sources
[ 124, 97, 124, 24, 1321552607 ]
def disconnect(self):
    try:
        self.__socket.close()
    except socket.error, msg:
        self.__socket = None
        return 0
    return 1
pwnieexpress/pwn_plug_sources
[ 124, 97, 124, 24, 1321552607 ]
def recv(self):
    buffer = self.__socket.recv(8192)
    return buffer
pwnieexpress/pwn_plug_sources
[ 124, 97, 124, 24, 1321552607 ]
def connect(self):
    TCPTransport.connect(self)
    self.__socket.send('RPC_CONNECT ' + self.get_dip() + ':593 HTTP/1.0\r\n\r\n')
    data = self.__socket.recv(8192)
    if data[10:13] != '200':
        raise Exception("Service not supported.")
pwnieexpress/pwn_plug_sources
[ 124, 97, 124, 24, 1321552607 ]
def __init__(self, dstip, dstport=445, filename='', username='', password='', lm_hash='', nt_hash=''):
    DCERPCTransport.__init__(self, dstip, dstport)
    self.__socket = None
    self.__smb_server = 0
    self.__tid = 0
    self.__filename = filename
    self.__handle = 0
    self.__pending_recv = 0
    self.set_credentials(username, password, lm_hash, nt_hash)
pwnieexpress/pwn_plug_sources
[ 124, 97, 124, 24, 1321552607 ]
def connect(self):
    self.setup_smb_server()
    if self.__smb_server.is_login_required():
        if self._password != '' or (self._password == '' and self._nt_hash == '' and self._lm_hash == ''):
            self.__smb_server.login(self._username, self._password)
        elif self._nt_hash != '' or self._lm_hash != '':
            self.__smb_server.login(self._username, '', '', self._lm_hash, self._nt_hash)
    self.__tid = self.__smb_server.tree_connect_andx('\\\\*SMBSERVER\\IPC$')
    self.__handle = self.__smb_server.nt_create_andx(self.__tid, self.__filename)
    # self.__handle = self.__smb_server.open_file_andx(self.__tid, r"\\PIPE\%s" % self.__filename, smb.SMB_O_CREAT, smb.SMB_ACCESS_READ)[0]
    # self.__handle = self.__smb_server.open_file(self.__tid, r"\\PIPE\%s" % self.__filename, smb.SMB_O_CREAT, smb.SMB_ACCESS_READ)[0]
    self.__socket = self.__smb_server.get_socket()
    return 1
pwnieexpress/pwn_plug_sources
[ 124, 97, 124, 24, 1321552607 ]
def disconnect(self):
    self.__smb_server.disconnect_tree(self.__tid)
    self.__smb_server.logoff()
pwnieexpress/pwn_plug_sources
[ 124, 97, 124, 24, 1321552607 ]
def recv(self):
    if self._max_send_frag or self.__pending_recv:
        # _max_send_frag is checked because it's the same condition we checked
        # to decide whether to use write_andx() or send_trans() in send() above.
        if self.__pending_recv:
            self.__pending_recv -= 1
        return self.__smb_server.read_andx(self.__tid, self.__handle, max_size=self._max_recv_frag)
    else:
        s = self.__smb_server.recv_packet()
        if self.__smb_server.isValidAnswer(s, smb.SMB.SMB_COM_TRANSACTION):
            trans = smb.TRANSHeader(s.get_parameter_words(), s.get_buffer())
            data = trans.get_data()
            return data
        return None
pwnieexpress/pwn_plug_sources
[ 124, 97, 124, 24, 1321552607 ]
def testAdd(self):  ## test method names begin 'test*'
    self.assertEqual((1 + 2), 3)
    self.assertEqual(0 + 1, 1)
Microvellum/Fluid-Designer
[ 69, 30, 69, 37, 1461884765 ]
def __init__(self, head, size):
    self.head = head
    self.size = size
lvh/txyoga
[ 24, 5, 24, 15, 1301588690 ]
def test_complete(self):
    """
    Test serialization of a screwdriver.
    """
    screwdriver = Screwdriver("philips", "m3")
    state = screwdriver.toState()
    self.assertEqual(state, {"head": "philips", "size": "m3"})
lvh/txyoga
[ 24, 5, 24, 15, 1301588690 ]
def test_deserialize(self):
    """
    Deserialize a screwdriver.
    """
    state = {"head": "philips", "size": "m3"}
    screwdriver = Screwdriver.fromState(state)
    for attr, value in state.iteritems():
        self.assertEqual(getattr(screwdriver, attr), value)
lvh/txyoga
[ 24, 5, 24, 15, 1301588690 ]
def test_createElement(self):
    """
    Create a screwdriver from some state, and verify that it has
    correctly materialized.
    """
    toolbox = Toolbox()
    state = {"head": "philips", "size": "m3"}
    screwdriver = toolbox.createElementFromState(state)
    for attr, value in state.iteritems():
        self.assertEqual(getattr(screwdriver, attr), value)
lvh/txyoga
[ 24, 5, 24, 15, 1301588690 ]
def load(cur_graph, rdf_file_path, tmp_dir=None):
    if os.path.isfile(rdf_file_path):
        try:
            cur_graph = __load_graph(cur_graph, rdf_file_path)
        except IOError:
            if tmp_dir is not None:
                current_file_path = tmp_dir + os.sep + "tmp_rdf_file_create_nq.rdf"
                shutil.copyfile(rdf_file_path, current_file_path)
                try:
                    cur_graph = __load_graph(cur_graph, current_file_path)
                except IOError as e:
                    reperr.add_sentence("It was impossible to handle the format used for "
                                        "storing the file (stored in the temporary path) '%s'. "
                                        "Additional details: %s" % (current_file_path, str(e)))
                os.remove(current_file_path)
            else:
                reperr.add_sentence("It was impossible to try to load the file from the "
                                    "temporary path '%s' since that has not been specified in "
                                    "advance" % rdf_file_path)
    else:
        reperr.add_sentence("The file specified ('%s') doesn't exist." % rdf_file_path)
    return cur_graph
essepuntato/opencitations
[ 61, 3, 61, 25, 1460444527 ]
def db_table_exists(table):
    """Check if table exists."""
    return table in connection.introspection.table_names()
tonioo/modoboa
[ 2442, 336, 2442, 81, 1366105435 ]
def nolr(tile_type):
    ''' Remove _L or _R tile_type suffix, if present

    Ex: BRAM_INT_INTERFACE_L => BRAM_INT_INTERFACE
    Ex: VBRK => VBRK
    '''
    postfix = tile_type[-2:]
    if postfix in ('_L', '_R'):
        return tile_type[:-2]
    else:
        return tile_type
SymbiFlow/prjuray
[ 48, 12, 48, 18, 1594844148 ]
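A quick usage sketch of the nolr helper above (illustrative only, not part of the dataset record; it assumes the function is in scope):

assert nolr('BRAM_INT_INTERFACE_L') == 'BRAM_INT_INTERFACE'
assert nolr('INT_R') == 'INT'
assert nolr('VBRK') == 'VBRK'  # no _L/_R suffix, so the name is returned unchanged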
def propagate_INT_bits_in_column(database, tiles_by_grid, tile_frames_map):
    """ Propagate INT offsets up and down INT columns.

    INT columns appear to be fairly regular, where starting from offset 0,
    INT tiles next to INT tiles increase the word offset by 3.
    """
    int_frames, int_words = localutil.get_entry('INT', 'CLB_IO_CLK')
    propagate_bits_in_column(
        database=database,
        tiles_by_grid=tiles_by_grid,
        tile_type='INT',
        term_b='INT_TERM_B',
        term_t='INT_TERM_T',
        rbrk='INT_RBRK',
        rclk_types=['RCLK_INT_L', 'RCLK_INT_R'],
        tile_frames=int_frames,
        tile_words=int_words,
        tile_frames_map=tile_frames_map)
SymbiFlow/prjuray
[ 48, 12, 48, 18, 1594844148 ]
def propagate_bits_in_column(database, tiles_by_grid, tile_type, term_b,
                             term_t, rbrk, rclk_types, tile_frames,
                             tile_words, tile_frames_map):
    """ Propagate offsets up and down columns, based on a fixed pattern. """
    rclk_frames, rclk_words = localutil.get_entry('RCLK_INT_L', 'CLB_IO_CLK')
    _, ecc_words = localutil.get_entry('ECC', 'CLB_IO_CLK')

    seen_int = set()
    for tile_name in sorted(database.keys()):
        tile = database[tile_name]
        if tile['type'] != tile_type:
            continue
        if not tile['bits']:
            continue
        if tile_name in seen_int:
            continue

        # Walk down column
        while True:
            seen_int.add(tile_name)
            next_tile = tiles_by_grid[(tile['grid_x'], tile['grid_y'] + 1)]
            next_tile_type = database[next_tile]['type']

            if tile['bits']['CLB_IO_CLK']['offset'] == 0:
                assert next_tile_type in [term_b, rbrk], next_tile_type
                break

            baseaddr = int(tile['bits']['CLB_IO_CLK']['baseaddr'], 0)
            offset = tile['bits']['CLB_IO_CLK']['offset']

            if tile['type'] == tile_type and next_tile_type == tile['type']:
                # INT next to INT
                offset -= tile_words
                localutil.add_tile_bits(next_tile, database[next_tile],
                                        baseaddr, offset, tile_frames,
                                        tile_words, tile_frames_map)
            elif tile['type'] == tile_type:
                # INT above RCLK
                assert next_tile_type in rclk_types, next_tile_type
                offset -= rclk_words
                localutil.add_tile_bits(next_tile, database[next_tile],
                                        baseaddr, offset, rclk_frames,
                                        rclk_words, tile_frames_map)
                offset -= ecc_words
            else:
                # RCLK above INT
                assert tile['type'] in rclk_types, tile['type']
                if next_tile_type == tile_type:
                    offset -= ecc_words
                    offset -= tile_words
                    localutil.add_tile_bits(next_tile, database[next_tile],
                                            baseaddr, offset, tile_frames,
                                            tile_words, tile_frames_map)
                else:
                    assert next_tile_type in [], next_tile_type
                    break

            tile_name = next_tile
            tile = database[tile_name]

        # Walk up INT column
        while True:
            seen_int.add(tile_name)
            next_tile = tiles_by_grid[(tile['grid_x'], tile['grid_y'] - 1)]
            next_tile_type = database[next_tile]['type']

            if tile['bits']['CLB_IO_CLK']['offset'] == 183:
                assert next_tile_type in [term_t, rbrk], next_tile_type
                break

            baseaddr = int(tile['bits']['CLB_IO_CLK']['baseaddr'], 0)
            offset = tile['bits']['CLB_IO_CLK']['offset']

            if tile['type'] == tile_type and next_tile_type == tile['type']:
                # INT next to INT
                offset += tile_words
                localutil.add_tile_bits(next_tile, database[next_tile],
                                        baseaddr, offset, tile_frames,
                                        tile_words, tile_frames_map)
            elif tile['type'] == tile_type:
                # INT below RCLK
                assert next_tile_type in rclk_types, next_tile_type
                offset += tile_words
                offset += ecc_words
                localutil.add_tile_bits(next_tile, database[next_tile],
                                        baseaddr, offset, rclk_frames,
                                        rclk_words, tile_frames_map)
            else:
                # RCLK below INT
                assert tile['type'] in rclk_types, tile['type']
                assert next_tile_type == tile_type, next_tile_type
                offset += rclk_words
                localutil.add_tile_bits(next_tile, database[next_tile],
                                        baseaddr, offset, tile_frames,
                                        tile_words, tile_frames_map)

            tile_name = next_tile
            tile = database[tile_name]
SymbiFlow/prjuray
[ 48, 12, 48, 18, 1594844148 ]
def propagate_XIPHY_bits_in_column(database, tiles_by_grid, tile_frames_map):
    xiphy_frames, xiphy_words = localutil.get_entry('XIPHY', 'CLB_IO_CLK')
    rclk_frames, rclk_words = localutil.get_entry('RCLK_INT_L', 'CLB_IO_CLK')
    _, ecc_words = localutil.get_entry('ECC', 'CLB_IO_CLK')

    for tile, tile_data in database.items():
        if tile_data['type'] != 'RCLK_XIPHY_OUTER_RIGHT':
            continue

        above_tile = tiles_by_grid[tile_data['grid_x'], tile_data['grid_y'] - 1]
        below_tile = tiles_by_grid[tile_data['grid_x'], tile_data['grid_y'] + 15]

        assert database[above_tile]['type'] == 'XIPHY_BYTE_RIGHT'
        assert database[below_tile]['type'] == 'XIPHY_BYTE_RIGHT'

        baseaddr1 = int(database[above_tile]['bits']['CLB_IO_CLK']['baseaddr'], 0)
        offset1 = database[above_tile]['bits']['CLB_IO_CLK']['offset']
        offset1 -= rclk_words
        localutil.add_tile_bits(tile, database[tile], baseaddr1, offset1,
                                rclk_frames, rclk_words, tile_frames_map)

        baseaddr2 = int(database[below_tile]['bits']['CLB_IO_CLK']['baseaddr'], 0)
        offset2 = database[below_tile]['bits']['CLB_IO_CLK']['offset']
        offset2 += xiphy_words
        offset2 += ecc_words
        localutil.add_tile_bits(tile, database[tile], baseaddr2, offset2,
                                rclk_frames, rclk_words, tile_frames_map)
SymbiFlow/prjuray
[ 48, 12, 48, 18, 1594844148 ]
def main():
    import argparse

    parser = argparse.ArgumentParser(
        description="Generate tilegrid.json from bitstream deltas")
    parser.add_argument("--verbose", action="store_true", help="")
    parser.add_argument(
        "--json-in",
        default="tiles_basic.json",
        help="Input .json without addresses")
    parser.add_argument(
        "--json-out", default="tilegrid.json", help="Output JSON")
    args = parser.parse_args()

    run(args.json_in, args.json_out, verbose=args.verbose)
SymbiFlow/prjuray
[ 48, 12, 48, 18, 1594844148 ]
def join(
    manager: bool = typer.Option(
        False, "--manager", show_default=False, help="join new node with manager role"
    )
rapydo/do
[ 11, 3, 11, 11, 1495096286 ]
def email_sender(self):
    return NotImplemented
jupiny/EnglishDiary
[ 4, 1, 4, 1, 1466937386 ]
def email_subject(self):
    return NotImplemented
jupiny/EnglishDiary
[ 4, 1, 4, 1, 1466937386 ]
def email_template(self):
    return NotImplemented
jupiny/EnglishDiary
[ 4, 1, 4, 1, 1466937386 ]
def _declr(self):
    addClkRstn(self)
    self.tick1 = Signal()._m()
    self.tick2 = Signal()._m()
    self.tick16 = Signal()._m()
    self.tick17 = Signal()._m()
    self.tick34 = Signal()._m()
    self.tick256 = Signal()._m()
    self.tick384 = Signal()._m()
Nic30/hwtLib
[ 34, 7, 34, 20, 1468185180 ]
def _declr(self):
    addClkRstn(self)
    self.tick1 = HandshakeSync()._m()
    self.tick2 = HandshakeSync()._m()
    self.tick16 = HandshakeSync()._m()
    self.tick17 = HandshakeSync()._m()
    self.tick34 = HandshakeSync()._m()
    self.tick256 = HandshakeSync()._m()
    self.tick384 = HandshakeSync()._m()
    self.core = TimerInfoTest()
Nic30/hwtLib
[ 34, 7, 34, 20, 1468185180 ]
def _declr(self):
    addClkRstn(self)
    self.period = VectSignal(10)
    self.en = Signal()
    self.rstCntr = Signal()
    self.cntr0 = Signal()._m()
    self.cntr1 = Signal()._m()
Nic30/hwtLib
[ 34, 7, 34, 20, 1468185180 ]
def tearDown(self):
    self.rmSim()
    SimTestCase.tearDown(self)
Nic30/hwtLib
[ 34, 7, 34, 20, 1468185180 ]
def test_dynamic_simple(self):
    u = DynamicCounterInstancesExample()
    self.compileSimAndStart(u)
    u.en._ag.data.append(1)
    u.rstCntr._ag.data.extend([0, 0, 0, 0, 1, 1, 1, 0])
    u.period._ag.data.append(5)
    self.runSim(200 * Time.ns)
    self.assertValSequenceEqual(
        u.cntr0._ag.data,
        [0, 0, 0, 0, 1, 0, 0, 0, 0, 1, 0, 0, 0, 0, 1, 0, 0, 0, 0])
    self.assertValSequenceEqual(
        u.cntr1._ag.data,
        [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 1, 0, 0])
Nic30/hwtLib
[ 34, 7, 34, 20, 1468185180 ]
def test_upgrade_atac_alignment_enrichment_quality_metric_1_2(
    upgrader, atac_alignment_enrichment_quality_metric_1
ENCODE-DCC/encoded
[ 104, 54, 104, 70, 1354841541 ]
def __init__(self, config, handlers):
    super(KorrekturApp, self).__init__(handlers)
    for handler in handlers:
        handler[1].config = config
    self.config = config
hhucn/netsec-uebungssystem
[ 1, 2, 1, 6, 1412599680 ]
def users(self):
    return self.config('korrektoren')
hhucn/netsec-uebungssystem
[ 1, 2, 1, 6, 1412599680 ]
def create_acceleration_pc_fixtures(request):
    """ Parameterized Acceleration path constraint.

    Return:
    -------
    data: A tuple. Contains path, ss, alim.
    pc: A `PathConstraint`.
    """
    dof = request.param
    if dof == 1:  # Scalar
        pi = ta.PolynomialPath([1, 2, 3])  # 1 + 2s + 3s^2
        ss = np.linspace(0, 1, 3)
        alim = (np.r_[-1., 1]).reshape(1, 2)  # Scalar case
        accel_const = constraint.JointAccelerationConstraint(
            alim, constraint.DiscretizationType.Collocation)
        data = (pi, ss, alim)
        return data, accel_const

    if dof == 2:
        coeff = [[1., 2, 3], [-2., -3., 4., 5.]]
        pi = ta.PolynomialPath(coeff)
        ss = np.linspace(0, 0.75, 4)
        alim = np.array([[-1., 2], [-2., 2]])
        accel_const = constraint.JointAccelerationConstraint(
            alim, constraint.DiscretizationType.Collocation)
        data = (pi, ss, alim)
        return data, accel_const

    if dof == 6:
        np.random.seed(10)
        N = 20
        way_pts = np.random.randn(10, 6)
        pi = ta.SplineInterpolator(np.linspace(0, 1, 10), way_pts)
        ss = np.linspace(0, 1, N + 1)
        vlim_ = np.random.rand(6)
        alim = np.vstack((-vlim_, vlim_)).T
        accel_const = constraint.JointAccelerationConstraint(
            alim, constraint.DiscretizationType.Collocation)
        data = (pi, ss, alim)
        return data, accel_const

    if dof == '6d':
        np.random.seed(10)
        N = 20
        way_pts = np.random.randn(10, 6)
        pi = ta.SplineInterpolator(np.linspace(0, 1, 10), way_pts)
        ss = np.linspace(0, 1, N + 1)
        alim_s = np.random.rand(6)
        alim = np.vstack((-alim_s, alim_s)).T
        accel_const = constraint.JointAccelerationConstraint(
            alim_s, constraint.DiscretizationType.Collocation)
        data = (pi, ss, alim)
        return data, accel_const
hungpham2511/toppra
[ 434, 141, 434, 22, 1500429799 ]
def test_constraint_params(accel_constraint_setup):
    """ Test constraint satisfaction with cvxpy.
    """
    (path, ss, alim), accel_const = accel_constraint_setup

    # A user of the class
    a, b, c, F, g, ubound, xbound = accel_const.compute_constraint_params(path, ss)
    assert xbound is None

    N = ss.shape[0] - 1
    dof = path.dof

    ps = path(ss, 1)
    pss = path(ss, 2)

    F_actual = np.vstack((np.eye(dof), -np.eye(dof)))
    g_actual = np.hstack((alim[:, 1], -alim[:, 0]))

    npt.assert_allclose(F, F_actual)
    npt.assert_allclose(g, g_actual)
    for i in range(0, N + 1):
        npt.assert_allclose(a[i], ps[i])
        npt.assert_allclose(b[i], pss[i])
        npt.assert_allclose(c[i], np.zeros_like(ps[i]))

    assert ubound is None
    assert xbound is None
hungpham2511/toppra
[ 434, 141, 434, 22, 1500429799 ]
def star_topology(random, population, args):
    """Returns the neighbors using a star topology.
aarongarrett/inspyred
[ 175, 54, 175, 11, 1331181888 ]
def ring_topology(random, population, args):
    """Returns the neighbors using a ring topology.
aarongarrett/inspyred
[ 175, 54, 175, 11, 1331181888 ]
def rgb_to_cairo(rgb):
    if len(rgb) == 4:
        r, g, b, a = rgb
        return (r / 255.0, g / 255.0, b / 255.0, a / 255.0)
    else:
        r, g, b = rgb
        return (r / 255.0, g / 255.0, b / 255.0, 1.0)
SymbiFlow/symbolator
[ 12, 3, 12, 4, 1594952177 ]
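A minimal sketch of what rgb_to_cairo above returns (illustrative only, not part of the dataset record; it assumes the function is in scope and that channels are 8-bit values):

assert rgb_to_cairo((255, 0, 0)) == (1.0, 0.0, 0.0, 1.0)  # alpha defaults to opaque
assert rgb_to_cairo((0, 255, 0, 127)) == (0.0, 1.0, 0.0, 127 / 255.0)  # explicit alpha is scaled too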
def cairo_line_cap(line_cap):
    if line_cap == 'round':
        return cairo.LINE_CAP_ROUND
    elif line_cap == 'square':
        return cairo.LINE_CAP_SQUARE
    else:
        return cairo.LINE_CAP_BUTT
SymbiFlow/symbolator
[ 12, 3, 12, 4, 1594952177 ]
def __init__(self, fname, def_styles, padding=0, scale=1.0):
    BaseSurface.__init__(self, fname, def_styles, padding, scale)
    self.ctx = None
SymbiFlow/symbolator
[ 12, 3, 12, 4, 1594952177 ]
def text_bbox(self, text, font_params, spacing=0):
    return CairoSurface.cairo_text_bbox(text, font_params, spacing, self.scale)
SymbiFlow/symbolator
[ 12, 3, 12, 4, 1594952177 ]
def cairo_text_bbox(text, font_params, spacing=0, scale=1.0):
    surf = cairo.ImageSurface(cairo.FORMAT_ARGB32, 8, 8)
    ctx = cairo.Context(surf)

    # The scaling must match the final context.
    # If not there can be a mismatch between the computed extents here
    # and those generated for the final render.
    ctx.scale(scale, scale)

    font = cairo_font(font_params)

    if use_pygobject:
        status, attrs, plain_text, _ = pango.parse_markup(text, len(text), '\0')

        layout = pangocairo.create_layout(ctx)
        pctx = layout.get_context()
        fo = cairo.FontOptions()
        fo.set_antialias(cairo.ANTIALIAS_SUBPIXEL)
        pangocairo.context_set_font_options(pctx, fo)
        layout.set_font_description(font)
        layout.set_spacing(spacing * pango.SCALE)
        layout.set_text(plain_text, len(plain_text))
        layout.set_attributes(attrs)

        li = layout.get_iter()  # Get first line of text
        baseline = li.get_baseline() / pango.SCALE

        re = layout.get_pixel_extents()[1]  # Get logical extents
        extents = (re.x, re.y, re.x + re.width, re.y + re.height)

    else:  # pyGtk
        attrs, plain_text, _ = pango.parse_markup(text)

        pctx = pangocairo.CairoContext(ctx)
        pctx.set_antialias(cairo.ANTIALIAS_SUBPIXEL)
        layout = pctx.create_layout()
        layout.set_font_description(font)
        layout.set_spacing(spacing * pango.SCALE)
        layout.set_text(plain_text)
        layout.set_attributes(attrs)

        li = layout.get_iter()  # Get first line of text
        baseline = li.get_baseline() / pango.SCALE

        #print('@@ EXTENTS:', layout.get_pixel_extents()[1], spacing)
        extents = layout.get_pixel_extents()[1]  # Get logical extents

    return [extents[0], extents[1], extents[2], extents[3], baseline]
SymbiFlow/symbolator
[ 12, 3, 12, 4, 1594952177 ]
def draw_text(x, y, text, font, text_color, spacing, c):
    c.save()
    c.set_source_rgba(*rgb_to_cairo(text_color))
    font = cairo_font(font)
    c.translate(x, y)

    if use_pygobject:
        status, attrs, plain_text, _ = pango.parse_markup(text, len(text), '\0')

        layout = pangocairo.create_layout(c)
        pctx = layout.get_context()
        fo = cairo.FontOptions()
        fo.set_antialias(cairo.ANTIALIAS_SUBPIXEL)
        pangocairo.context_set_font_options(pctx, fo)
        layout.set_font_description(font)
        layout.set_spacing(spacing * pango.SCALE)
        layout.set_text(plain_text, len(plain_text))
        layout.set_attributes(attrs)
        pangocairo.update_layout(c, layout)
        pangocairo.show_layout(c, layout)

    else:  # pyGtk
        attrs, plain_text, _ = pango.parse_markup(text)

        pctx = pangocairo.CairoContext(c)
        pctx.set_antialias(cairo.ANTIALIAS_SUBPIXEL)
        layout = pctx.create_layout()
        layout.set_font_description(font)
        layout.set_spacing(spacing * pango.SCALE)
        layout.set_text(plain_text)
        layout.set_attributes(attrs)
        pctx.update_layout(layout)
        pctx.show_layout(layout)

    c.restore()
SymbiFlow/symbolator
[ 12, 3, 12, 4, 1594952177 ]
def __init__(
    self, plotly_name="outlinecolor", parent_name="choropleth.colorbar", **kwargs
plotly/python-api
[ 13052, 2308, 13052, 1319, 1385013188 ]
def run_pydoc(module_name, *args):
    """
    Runs pydoc on the specified module. Returns the stripped
    output of pydoc.
    """
    cmd = [sys.executable, pydoc.__file__, " ".join(args), module_name]
    output = subprocess.Popen(cmd, stdout=subprocess.PIPE).stdout.read()
    return output.strip()
babyliynfg/cross
[ 75, 39, 75, 4, 1489383147 ]
def get_pydoc_html(module):
    "Returns pydoc generated output as html"
    doc = pydoc.HTMLDoc()
    output = doc.docmodule(module)
    loc = doc.getdocloc(pydoc_mod) or ""
    if loc:
        loc = "<br><a href=\"" + loc + "\">Module Docs</a>"
    return output.strip(), loc
babyliynfg/cross
[ 75, 39, 75, 4, 1489383147 ]
def get_pydoc_text(module):
    "Returns pydoc generated output as text"
    doc = pydoc.TextDoc()
    loc = doc.getdocloc(pydoc_mod) or ""
    if loc:
        loc = "\nMODULE DOCS\n    " + loc + "\n"
babyliynfg/cross
[ 75, 39, 75, 4, 1489383147 ]
def print_diffs(text1, text2):
    "Prints unified diffs for two texts"
    lines1 = text1.splitlines(True)
    lines2 = text2.splitlines(True)
    diffs = difflib.unified_diff(lines1, lines2, n=0,
                                 fromfile='expected', tofile='got')
    print '\n' + ''.join(diffs)
babyliynfg/cross
[ 75, 39, 75, 4, 1489383147 ]
def test_html_doc(self):
    result, doc_loc = get_pydoc_html(pydoc_mod)
    mod_file = inspect.getabsfile(pydoc_mod)
    if sys.platform == 'win32':
        import nturl2path
        mod_url = nturl2path.pathname2url(mod_file)
    else:
        mod_url = mod_file
    expected_html = expected_html_pattern % (mod_url, mod_file, doc_loc)
    if result != expected_html:
        print_diffs(expected_html, result)
        self.fail("outputs are not equal, see diff above")
babyliynfg/cross
[ 75, 39, 75, 4, 1489383147 ]
def test_text_doc(self):
    result, doc_loc = get_pydoc_text(pydoc_mod)
    expected_text = expected_text_pattern % \
                    (inspect.getabsfile(pydoc_mod), doc_loc)
    if result != expected_text:
        print_diffs(expected_text, result)
        self.fail("outputs are not equal, see diff above")
babyliynfg/cross
[ 75, 39, 75, 4, 1489383147 ]
def test_issue8225(self):
    # Test issue8225 to ensure no doc link appears for xml.etree
    result, doc_loc = get_pydoc_text(xml.etree)
    self.assertEqual(doc_loc, "", "MODULE DOCS incorrectly includes a link")
babyliynfg/cross
[ 75, 39, 75, 4, 1489383147 ]
def test_not_here(self):
    missing_module = "test.i_am_not_here"
    result = run_pydoc(missing_module)
    expected = missing_pattern % missing_module
    self.assertEqual(expected, result,
                     "documentation for missing module found")
babyliynfg/cross
[ 75, 39, 75, 4, 1489383147 ]
def test_badimport(self):
    # This tests the fix for issue 5230, where if pydoc found the module
    # but the module had an internal import error pydoc would report no doc
    # found.
    modname = 'testmod_xyzzy'
    testpairs = (
        ('i_am_not_here', 'i_am_not_here'),
        ('test.i_am_not_here_either', 'i_am_not_here_either'),
        ('test.i_am_not_here.neither_am_i', 'i_am_not_here.neither_am_i'),
        ('i_am_not_here.{0}'.format(modname), 'i_am_not_here.{0}'.format(modname)),
        ('test.{0}'.format(modname), modname),
        )
babyliynfg/cross
[ 75, 39, 75, 4, 1489383147 ]
def newdirinpath(dir):
    os.mkdir(dir)
    sys.path.insert(0, dir)
    yield
    sys.path.pop(0)
    rmtree(dir)
babyliynfg/cross
[ 75, 39, 75, 4, 1489383147 ]
def test_input_strip(self):
    missing_module = " test.i_am_not_here "
    result = run_pydoc(missing_module)
    expected = missing_pattern % missing_module.strip()
    self.assertEqual(expected, result,
                     "white space was not stripped from module name "
                     "or other error output mismatch")
babyliynfg/cross
[ 75, 39, 75, 4, 1489383147 ]
def test_stripid(self):
    # test with strings, other implementations might have different repr()
    stripid = pydoc.stripid
    # strip the id
    self.assertEqual(stripid('<function stripid at 0x88dcee4>'),
                     '<function stripid>')
    self.assertEqual(stripid('<function stripid at 0x01F65390>'),
                     '<function stripid>')
    # nothing to strip, return the same text
    self.assertEqual(stripid('42'), '42')
    self.assertEqual(stripid("<type 'exceptions.Exception'>"),
                     "<type 'exceptions.Exception'>")
babyliynfg/cross
[ 75, 39, 75, 4, 1489383147 ]
def test_module(self):
    # Check that pydocfodder module can be described
    from test import pydocfodder
    doc = pydoc.render_doc(pydocfodder)
    self.assert_("pydocfodder" in doc)
babyliynfg/cross
[ 75, 39, 75, 4, 1489383147 ]
def test_classic_class(self):
    class C: "Classic class"
    c = C()
    self.assertEqual(pydoc.describe(C), 'class C')
    self.assertEqual(pydoc.describe(c), 'instance of C')
    expected = 'instance of C in module %s' % __name__
    self.assert_(expected in pydoc.render_doc(c))
babyliynfg/cross
[ 75, 39, 75, 4, 1489383147 ]
def test_class(self):
    class C(object): "New-style class"
    c = C()
babyliynfg/cross
[ 75, 39, 75, 4, 1489383147 ]
def test_main():
    test.test_support.run_unittest(PyDocDocTest,
                                   TestDescriptions)
babyliynfg/cross
[ 75, 39, 75, 4, 1489383147 ]
def __init__(
    self, plotly_name="size", parent_name="scatterternary.selected.marker", **kwargs
plotly/python-api
[ 13052, 2308, 13052, 1319, 1385013188 ]
def __init__(self, domain='r-kvstore.aliyuncs.com', port=80):
    RestApi.__init__(self, domain, port)
    self.InstanceIds = None
    self.InstanceStatus = None
    self.PageNumber = None
    self.PageSize = None
    self.RegionId = None
francisar/rds_manager
[ 11, 11, 11, 1, 1448422655 ]
def __init__(
    self, plotly_name="thicknessmode", parent_name="surface.colorbar", **kwargs
plotly/plotly.py
[ 13052, 2308, 13052, 1319, 1385013188 ]
def __init__(
    self, plotly_name="alignsrc", parent_name="contour.hoverlabel", **kwargs
plotly/python-api
[ 13052, 2308, 13052, 1319, 1385013188 ]
def upgrade():
    # ### commands auto generated by Alembic - please adjust! ###
    op.add_column('discovery', sa.Column('num_datasets', sa.Integer(), nullable=True))
    # ### end Alembic commands ###
ahoarfrost/metaseek
[ 15, 1, 15, 11, 1473026988 ]
def convert_DBUS_to_python(val):
    ''' quick hack to convert DBUS types to python types '''
    if isinstance(val, (str, six.text_type,)):
        return str(val)
    elif isinstance(val, (int,)):
        return int(val)
    elif isinstance(val, (dict,)):
        return convert_DBUSDictionary_to_dict(val)
    elif isinstance(val, (list,)):
        return convert_DBUSArray_to_tuple(val)
    elif isinstance(val, (tuple,)):
        return val[1]
    elif isinstance(val, (float,)):
        return float(val)
    else:
        raise TypeError("Unknown type '%s': '%r'!" % (str(type(val)), repr(val)))
infothrill/flask-socketio-dbus-demo
[ 8, 4, 8, 12, 1399972424 ]
def convert_DBUSDictionary_to_dict(dbusdict):
    return {convert_DBUS_to_python(k): convert_DBUS_to_python(dbusdict[k])
            for k in dbusdict}
infothrill/flask-socketio-dbus-demo
[ 8, 4, 8, 12, 1399972424 ]
def uPowerDeviceGetAll(conn, device):
    '''
    Utility method that uses the given DBUS connection to call the
    UPower.GetAll method on the UPower device specified and returns pure
    python data.

    :param conn: DBUS connection
    :param device: the device
    '''
    log.debug("uPowerDeviceGetAll %s", device)
    return convert_DBUS_to_python(conn.call_method(device,
                                                   member='GetAll',
                                                   interface=DBUS_PROP_NAME,
                                                   destination=UPOWER_NAME,
                                                   format='s',
                                                   args=(UPOWER_DEVICE_IFACE,)
                                                   ).get_args()[0])
infothrill/flask-socketio-dbus-demo
[ 8, 4, 8, 12, 1399972424 ]
def __init__(self, connect, devices):
    '''
    A DBUS signal handler class for the org.freedesktop.UPower.Device
    'Changed' event. To re-read the device data, a DBUS connection is
    required. This is established when an event is fired using the
    provided connect method. Essentially, this is a cluttered workaround
    for a bizarre object design and use of decorators in the tdbus
    library.

    :param connect: a DBUS system bus connection factory
    :param devices: the devices to watch
    '''
    self.connect = connect
    self.device_paths = devices
    log.debug('Installing signal handler for devices: %r', devices)
    self._observers = {}
    super(UPowerDeviceHandler, self).__init__()
infothrill/flask-socketio-dbus-demo
[ 8, 4, 8, 12, 1399972424 ]
def notify_observers(self, device=None, attributes=None):
    """notify observers """
    log.debug("%s %r", device, attributes)
    for observer, devices in list(self._observers.items()):
        # log.debug("trying to notify the observer")
        if devices is None or device is None or device in devices:
            try:
                observer(self, device, attributes)
            except (Exception,) as ex:  # pylint: disable=W0703
                self.unregister_observer(observer)
                # Parenthesize the concatenated string so .format() applies to
                # the whole message (previously it applied only to the second
                # half, leaving '{0}' literal).
                errmsg = ("Exception in message dispatch: Handler '{0}'"
                          " unregistered for device '{1}' ").format(
                              observer.__class__.__name__, device)
                log.error(errmsg, exc_info=ex)
infothrill/flask-socketio-dbus-demo
[ 8, 4, 8, 12, 1399972424 ]
def Changed(self, message):
    device = message.get_path()
    if device in self.device_paths:
        log.debug('signal received: %s, args = %r', message.get_member(),
                  message.get_args())
        conn = self.connect()
        self.notify_observers(device, uPowerDeviceGetAll(conn, device))
        conn.close()
infothrill/flask-socketio-dbus-demo
[ 8, 4, 8, 12, 1399972424 ]
def upower_present(connect):
    conn = connect()
    result = conn.call_method(tdbus.DBUS_PATH_DBUS, 'ListNames',
                              tdbus.DBUS_INTERFACE_DBUS,
                              destination=tdbus.DBUS_SERVICE_DBUS)
    conn.close()
    # see if UPower is in the known services:
    return UPOWER_NAME in (name for name in result.get_args()[0]
                           if not name.startswith(':'))
infothrill/flask-socketio-dbus-demo
[ 8, 4, 8, 12, 1399972424 ]
def is_rechargeable(conn, device):
    log.debug("testing IsRechargeable for '%s'", device)
    return uPowerDeviceGet(conn, device, 'IsRechargeable')
infothrill/flask-socketio-dbus-demo
[ 8, 4, 8, 12, 1399972424 ]
def main():
    logging.basicConfig(level=logging.DEBUG)
    if not upower_present(connect_dbus_system):
        raise EnvironmentError("DBUS connection to UPower impossible")
    conn = connect_dbus_system()
    conn.add_handler(UPowerDeviceHandler(connect_dbus_system,
                                         set(ibatteries(conn))))
    conn.subscribe_to_signals()
    # basic select() loop, i.e. we assume there is no event loop
    conn.dispatch()
infothrill/flask-socketio-dbus-demo
[ 8, 4, 8, 12, 1399972424 ]
def display(self):
    if self.username in MAINTAINERS:
        return "* [#{}] {}" \
            .format(self.pull_request_id, self.description)
    else:
        return "* [#{}] {} (by [@{}])" \
            .format(self.pull_request_id, self.description, self.username)
intellij-rust/intellij-rust.github.io
[ 70, 24, 70, 3, 1467554026 ]
def __init__(self, header: str):
    self.header = header
    self.items = []
intellij-rust/intellij-rust.github.io
[ 70, 24, 70, 3, 1467554026 ]
def display(self):
    return """## {}\n\n""".format(self.header) + \
           "\n\n".join(map(lambda l: l.display(), self.items))
intellij-rust/intellij-rust.github.io
[ 70, 24, 70, 3, 1467554026 ]
def __init__(self, milestone_id=None):
    self.milestone_id = milestone_id
    self.labels = []
    self.sections = {}
    self.contributors = set()
    self.__add_section("feature", ChangelogSection("New Features"))
    self.__add_section("performance", ChangelogSection("Performance Improvements"))
    self.__add_section("fix", ChangelogSection("Fixes"))
    self.__add_section("internal", ChangelogSection("Internal Improvements"))
intellij-rust/intellij-rust.github.io
[ 70, 24, 70, 3, 1467554026 ]
def add_item(self, label: str, item: ChangelogItem):
    section = self.sections.get(label)
    if section is not None:
        section.add_item(item)
        if item.username not in MAINTAINERS:
            self.contributors.add(item.username)
intellij-rust/intellij-rust.github.io
[ 70, 24, 70, 3, 1467554026 ]
def collect_changelog(repo: Repository, milestone: Milestone):
    print(f"Collecting changelog issues for `{milestone.title}` milestone")
    changelog = Changelog(milestone.number)
    issues = repo.get_issues(milestone=milestone, state="all")
    comment_pattern = re.compile("<!--.*-->", re.RegexFlag.DOTALL)
    changelog_description_pattern = re.compile("[Cc]hangelog:\\s*(?P<description>([^\n]+\n?)*)")
    for issue in issues:
        if issue.pull_request is None:
            continue
        labels: Set[str] = set(map(lambda l: l.name, issue.labels))
        if len(labels) == 0:
            continue
        if issue.body is not None:
            issue_text = re.sub(comment_pattern, "", issue.body).replace("\r\n", "\n")
        else:
            issue_text = ""
        result = re.search(changelog_description_pattern, issue_text)
        if result is not None:
            description = result.group("description").strip().rstrip(".")
        else:
            description = issue.title
        changelog_item = ChangelogItem(issue.number, description, issue.user.login)
        for label in labels:
            changelog.add_item(label, changelog_item)
    return changelog
intellij-rust/intellij-rust.github.io
[ 70, 24, 70, 3, 1467554026 ]
def construct_repo(args: argparse.Namespace) -> Repository:
    if args.token is not None:
        login_or_token = args.token
        password = None
    else:
        login_or_token = args.login
        password = args.password
    g = Github(login_or_token, password)
    return g.get_repo(PLUGIN_REPO)
intellij-rust/intellij-rust.github.io
[ 70, 24, 70, 3, 1467554026 ]
def contributors():
    last_post = "_posts/" + sorted(os.listdir("_posts"))[-1]
    with open(last_post) as f:
        text = f.read()
    names = sorted({n[2:-1] for n in re.findall(r"\[@[^]]+]", text)})
    with open(last_post) as f:
        old_text = f.read()
    with open(last_post, 'a') as f:
        f.write("\n")
        for name in names:
            line = contributor_url(name)
            if line not in old_text:
                f.write(line)
intellij-rust/intellij-rust.github.io
[ 70, 24, 70, 3, 1467554026 ]
def contributor_list(args: argparse.Namespace) -> None:
    if args.list is None:
        raise ValueError("list flag should be set")
    first = args.list[0]
    last = args.list[1]
    if first >= last:
        raise ValueError("`first` should be less than `last`")
    repo = construct_repo(args)
    milestones = repo.get_milestones(state="all", sort="due_on")
    versions = {f"v{i}" for i in range(first, last + 1)}
    contributors = set()
    for milestone in milestones:
        if milestone.title in versions:
            milestone_contributors = collect_contributors(repo, milestone)
            print(f"Milestone {milestone.title}: {len(milestone_contributors)} external contributors")
            contributors.update(milestone_contributors)
    print()
    for c in sorted(contributors):
        print(c)
intellij-rust/intellij-rust.github.io
[ 70, 24, 70, 3, 1467554026 ]
def __init__(self, report):
    self.data = report
    self.type = self.data["reportMetadata"]["reportFormat"]
    self.has_details = self.data["hasDetailRows"]
cghall/salesforce-reporting
[ 22, 11, 22, 8, 1448899594 ]
def _flatten_record(record):
    return [field["label"] for field in record]
cghall/salesforce-reporting
[ 22, 11, 22, 8, 1448899594 ]
def records(self):
    """
    Return a list of all records included in the report.

    If detail rows are not included in the report a ValueError is raised
    instead.

    Returns
    -------
    records: list
    """
    if not self.has_details:
        raise ValueError('Report does not include details so cannot access individual records')

    records = []
    fact_map = self.data["factMap"]

    for group in fact_map.values():
        rows = group["rows"]
        group_records = (self._flatten_record(row["dataCells"]) for row in rows)

        for record in group_records:
            records.append(record)

    return records
cghall/salesforce-reporting
[ 22, 11, 22, 8, 1448899594 ]
def __init__(self, report):
    super().__init__(report)
    self.data = report
    self._check_type()
cghall/salesforce-reporting
[ 22, 11, 22, 8, 1448899594 ]
def get_col_total(self, col_label, default=None):
    """
    Return the total for the specified column. The default arg makes it
    possible to specify the return value if the column label is not found.

    Parameters
    ----------
    col_label: string
    default: string, optional, default None
        If column is not found determines the return value

    Returns
    -------
    total: int
    """
    grp_across_list = self.data["groupingsAcross"]["groupings"]
    col_dict = {grp['label']: int(grp['key']) for grp in grp_across_list}

    try:
        col_key = col_dict[col_label]
        return self.data["factMap"]['T!{}'.format(col_key)]["aggregates"][0]["value"]
    except KeyError:
        return default
cghall/salesforce-reporting
[ 22, 11, 22, 8, 1448899594 ]
def _convert_parameter(parameter):
    if type(parameter) is str:
        new_parameter = [parameter]
    elif parameter is None:
        new_parameter = []
    elif type(parameter) is list:
        new_parameter = parameter
    else:
        raise ValueError
    return new_parameter
cghall/salesforce-reporting
[ 22, 11, 22, 8, 1448899594 ]
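A small sketch of how _convert_parameter above normalizes its input (illustrative only, not part of the dataset record; it assumes the function is in scope):

assert _convert_parameter("Region") == ["Region"]    # single string wrapped in a list
assert _convert_parameter(None) == []                # None becomes an empty list
assert _convert_parameter(["a", "b"]) == ["a", "b"]  # lists pass through unchanged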
def _get_subgroup_index(group_above, subgroup_name):
    subgroups_with_index = {subgroup['label']: index
                            for index, subgroup in enumerate(group_above)}
    index = subgroups_with_index[subgroup_name]
    return index
cghall/salesforce-reporting
[ 22, 11, 22, 8, 1448899594 ]
def _get_static_key(self, groups_of_interest, static_grouping_key):
    grouping_depth = len(groups_of_interest)
    group_index = grouping_depth - 1
    top_grouping = self.data[static_grouping_key]["groupings"]
    grouping = self._get_grouping(groups_of_interest, top_grouping, grouping_depth)
    keys = {group['label']: group['key'] for group in grouping}
    static_key = keys[groups_of_interest[group_index]]
    return static_key
cghall/salesforce-reporting
[ 22, 11, 22, 8, 1448899594 ]
def _build_keys(self, static_groups_of_interest, dynamic_groups_of_interest,
                static_grouping_key, dynamic_grouping_key):
    static_key = self._get_static_key(static_groups_of_interest, static_grouping_key)
    dynamic_keys = self._get_dynamic_keys(dynamic_groups_of_interest, dynamic_grouping_key)
    keys = []
    if static_grouping_key == "groupingsAcross":
        for el in dynamic_keys["keys"]:
            key = "{}!{}".format(el, static_key)
            keys.append(key)
    else:
        for el in dynamic_keys["keys"]:
            key = "{}!{}".format(static_key, el)
            keys.append(key)
    return {"keys": keys, "labels": dynamic_keys["labels"]}
cghall/salesforce-reporting
[ 22, 11, 22, 8, 1448899594 ]
def series_down(self, column_groups, row_groups=None, value_position=0):
    """
    Return selected slice of a report on a vertical axis

    Parameters
    ----------
    column_groups: string or list
        The selected column to return series from
        If multiple grouping levels a list is used to identify grouping of interest
    row_groups: string, list or None, optional, default None
        Limits rows included in Series to those within specified grouping
    value_position: int, default 0
        Index of value of interest, if only one value included by default
        will select correct value

    Returns
    -------
    series: dict, {label: value, ...}
    """
    static_grouping_key = "groupingsAcross"
    dynamic_grouping_key = "groupingsDown"

    return self._series(column_groups, static_grouping_key, dynamic_grouping_key,
                        dynamic_groups_of_interest=row_groups,
                        value_position=value_position)
cghall/salesforce-reporting
[ 22, 11, 22, 8, 1448899594 ]
def setUp(self):
    # setting up our random data-set
    rng = np.random.RandomState(42)

    # D1 = train machines; D2 = create COBRA; D3 = calibrate epsilon, alpha; D4 = testing
    n_features = 20
    D1, D2, D3, D4 = 200, 200, 200, 200
    D = D1 + D2 + D3 + D4
    X = rng.uniform(-1, 1, D * n_features).reshape(D, n_features)
    Y = np.power(X[:,1], 2) + np.power(X[:,3], 3) + np.exp(X[:,10])

    # training data-set
    X_train = X[:D1 + D2]
    X_test = X[D1 + D2 + D3:D1 + D2 + D3 + D4]  # for testing
    Y_train = Y[:D1 + D2]
    Y_test = Y[D1 + D2 + D3:D1 + D2 + D3 + D4]

    cobra = Cobra(random_state=0, epsilon=0.5)
    cobra.fit(X_train, Y_train)
    ewa = Ewa(random_state=0)
    ewa.fit(X_train, Y_train)

    self.test_data = X_test
    self.test_response = Y_test
    self.cobra = cobra
    self.ewa = ewa
    self.cobra_diagnostics = Diagnostics(self.cobra, random_state=0)
    self.cobra_diagnostics_ewa = Diagnostics(self.ewa, random_state=0)
bhargavvader/pycobra
[ 113, 22, 113, 3, 1490467649 ]
def test_alpha_grid(self):
    (alpha, epsilon), mse = self.cobra_diagnostics.optimal_alpha_grid(
        self.test_data[0], self.test_response[0])
    expected_alpha, expected_mse = 1, 0.01331659692231877
    self.assertEqual(expected_alpha, alpha)
    self.assertAlmostEqual(expected_mse, mse[0], places=3)
bhargavvader/pycobra
[ 113, 22, 113, 3, 1490467649 ]
def test_machines_grid(self):
    (machines, epsilon), mse = self.cobra_diagnostics.optimal_machines_grid(
        self.test_data[0], self.test_response[0])
    expected_machines, expected_mse = ('svm', 'ridge'), 1.92151481985802e-05
    self.assertEqual(sorted(expected_machines), sorted(machines))
    self.assertAlmostEqual(expected_mse, mse[0], places=3)
bhargavvader/pycobra
[ 113, 22, 113, 3, 1490467649 ]
def test_epsilon(self):
    epsilon, mse = self.cobra_diagnostics.optimal_epsilon(
        self.test_data, self.test_response)
    expected_epsilon = 0.3709790878655187
    self.assertAlmostEqual(expected_epsilon, epsilon, places=3)
bhargavvader/pycobra
[ 113, 22, 113, 3, 1490467649 ]
def test_beta(self):
    beta, mse = self.cobra_diagnostics_ewa.optimal_beta(
        self.test_data, self.test_response)
    expected_beta, expected_mse = 0.1, 0.07838339131485009
    self.assertAlmostEqual(expected_beta, beta, places=3)
    self.assertAlmostEqual(expected_mse, mse, places=3)
bhargavvader/pycobra
[ 113, 22, 113, 3, 1490467649 ]