code | docs
---|---
def _revcheck(self, func, version):
    '''
    Internal function to compare a version (using the comparison named by
    func) against the version we have determined we are talking to. This is
    very useful for newer API calls to make sure we don't accidentally make
    a call to something that doesn't exist.
    '''
    current = self._revint(self.version)
    check = self._revint(version)
    if func in ('lt', '<=',):
        return check <= current
    elif func in ('gt', '>='):
        return check >= current
    elif func in ('eq', '=', 'equals'):
        return check == current
    else:
        return False | Internal function to compare a version (using the comparison named by
func) against the version we have determined we are talking to. This is
very useful for newer API calls to make sure we don't accidentally make
a call to something that doesn't exist. |
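_revcheck depends on a _revint helper that turns a dotted version string into a comparable integer. That helper is not shown in this row; a minimal sketch of what it might look like, purely as an assumption:

def _revint(self, version):
    # Hypothetical: weight each dotted component by powers of 100,
    # so '5.2.0' -> 50200 and '4.10.1' -> 41001.
    intrev = 0
    vsplit = version.split('.')
    for idx, part in enumerate(vsplit):
        intrev += int(part) * (100 ** (len(vsplit) - idx - 1))
    return intrev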
def _build_xrefs(self):
    '''
    Internal function to populate the xrefs list with the external
    references to be used in searching plugins and potentially
    other functions as well.
    '''
    xrefs = set()
    plugins = self.plugins()
    for plugin in plugins:
        for xref in plugin['xrefs'].split(', '):
            xrf = xref.replace('-', '_').split(':')[0]
            if xrf != '':
                xrefs.add(xrf)
    self._xrefs = list(xrefs) | Internal function to populate the xrefs list with the external
references to be used in searching plugins and potentially
other functions as well. |
def login(self, user, passwd):
data = self.raw_query('auth', 'login',
data={'username': user, 'password': passwd})
self._token = data["token"]
self._user = data | login user passwd
Performs the login operation for Security Center, storing the token
that Security Center has generated for this login session for future
queries. |
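A hedged usage sketch, assuming these methods live on a SecurityCenter client class (the class name and constructor are assumptions, not shown in these rows):

sc = SecurityCenter('sc.example.com')   # hypothetical constructor
sc.login('admin', 's3cr3t')
# subsequent raw_query() calls can now send the stored session token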
def credential_update(self, cred_id, **options):
payload = None
# First we pull the credentials and populate the payload if we
# find a match.
for cred in self.credentials()['credentials']:
if cred['id'] == str(cred_id):
payload = {
'id': cred_id,
'type': cred['type'],
'name': cred['name'],
'description': cred['description'],
'visibility': cred['visibility'],
'group': cred['group'],
'users': cred['users'],
}
if cred['type'] == 'kerberos':
payload['ip'] = cred['ip']
payload['port'] = cred['port']
payload['protocol'] = cred['protocol']
payload['realm'] = cred['realm']
if cred['type'] == 'snmp':
payload['communityString'] = cred['communityString']
if cred['type'] == 'ssh':
payload['username'] = cred['username']
payload['publickey'] = cred['publickey']
payload['privatekey'] = cred['privatekey']
payload['priviledgeEscalation'] = cred['priviledgeEscalation']
payload['escalationUsername'] = cred['escalationUsername']
if cred['type'] == 'windows':
payload['username'] = cred['username']
payload['domain'] = cred['domain']
if payload is None:
raise APIError(13, 'cred_id %s does not exist' % cred_id)
for option in options:
payload[option] = options[option]
return self.raw_query('credential', 'edit', data=payload) | credential_update cred_id **options
Updates the specified fields of the given credential ID. |
def credential_share_simulate(self, cred_id, *user_ids):
return self.raw_query("credential", "shareSimulate", data={
'id': cred_id,
'users': [{'id': i} for i in user_ids],
}) | Shares a given credential to the specified Users.
:param cred_id: Credential ID
:param user_ids: List of User IDs |
def credential_share(self, cred_id, *user_ids):
return self.raw_query("credential", "share", data={
'id': cred_id,
'users': [{'id': i} for i in user_ids],
}) | Shares a given credential to the specified Users.
:param cred_id: Credential ID
:param user_ids: List of User IDs |
def credential_delete_simulate(self, *ids):
return self.raw_query("credential", "deleteSimulate", data={
"credentials": [{"id": str(id)} for id in ids]
}) | Show the relationships and dependencies for one or more credentials.
:param ids: one or more credential ids |
def credential_delete(self, *ids):
return self.raw_query("credential", "delete", data={
"credentials": [{"id": str(id)} for id in ids]
}) | Delete one or more credentials.
:param ids: one or more credential ids |
def plugin_counts(self):
ret = {
'total': 0,
}
    # As usual, we need data before we can actually do anything ;)
    data = self.raw_query('plugin', 'init')
    # For backwards compatibility purposes, we will be handling this a bit
    # differently than I would like. We are going to check to see if each
    # value exists and override the default value of 0. The only value that
    # I know existed in both 4.2 and 4.4 is pluginCount; the rest aren't
    # listed in the API docs, however they are returned in my experimentation.
ret['total'] = data['pluginCount']
if 'lastUpdates' in data:
for item in ['active', 'passive', 'compliance', 'custom', 'event']:
itemdata = {}
if item in data['lastUpdates']:
itemdata = data['lastUpdates'][item]
if item in data:
itemdata['count'] = data[item]
else:
itemdata['count'] = 0
ret[item] = itemdata
return ret | plugin_counts
Returns the plugin counts as a dictionary with the last-updated info if
it's available. |
def ip_info(self, ip, repository_ids=None):
if not repository_ids:
repository_ids = []
repos = []
for rid in repository_ids:
repos.append({'id': rid})
return self.raw_query('vuln', 'getIP', data={
'ip': ip, 'repositories': repos}) | ip_info
Returns information about the specified IP from the given repository
IDs. |
def scan_list(self, start_time=None, end_time=None, **kwargs):
try:
end_time = datetime.utcfromtimestamp(int(end_time))
except TypeError:
if end_time is None:
end_time = datetime.utcnow()
try:
start_time = datetime.utcfromtimestamp(int(start_time))
except TypeError:
if start_time is None:
start_time = end_time - timedelta(days=30)
data = {"startTime": calendar.timegm(start_time.utctimetuple()),
"endTime": calendar.timegm(end_time.utctimetuple())}
data.update(kwargs)
result = self.raw_query("scanResult", "getRange", data=data)
return result["scanResults"] | List scans stored in Security Center in a given time range.
Time is given in UNIX timestamps, assumed to be UTC. If a `datetime` is
passed it is converted. If `end_time` is not specified it is NOW. If
`start_time` is not specified it is 30 days previous from `end_time`.
:param start_time: start of range to filter
:type start_time: date, datetime, int
:param end_time: end of range to filter
:type end_time: date, datetime, int
:return: list of dictionaries representing scans |
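A short usage sketch, assuming an instance sc as above; times are UNIX epochs in UTC, and the 'name' key is an assumption about the result dicts:

import time
now = int(time.time())
scans = sc.scan_list(start_time=now - 7 * 86400, end_time=now)
for scan in scans:
    print(scan['name'])   # assumed key; inspect a result dict to confirm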
def scan_download(self, scan_id, format='v2'):
payload = {
'downloadType': format,
'scanResultID': scan_id,
}
data = self.raw_query('scanResult', 'download', data=payload, dejson=False)
bobj = StringIO()
bobj.write(data)
zfile = ZipFile(bobj)
return zfile.read(zfile.namelist()[0]) | scan_download scan_id [format]
Will download an individual scan and return a string with the results. |
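Sketch of saving the downloaded results to disk (the scan id and filename are illustrative):

results = sc.scan_download(1234, format='v2')
with open('scan_1234.nessus', 'wb') as fout:
    fout.write(results)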
def dashboard_import(self, name, fileobj):
data = self._upload(fileobj)
return self.raw_query('dashboard', 'importTab', data={
'filename': data['filename'],
'name': name,
}) | dashboard_import Dashboard_Name, filename
Uploads a dashboard template to the current user's dashboard tabs.
UN-DOCUMENTED CALL: This function is not considered stable. |
def report_import(self, name, filename):
data = self._upload(filename)
return self.raw_query('report', 'import', data={
'filename': data['filename'],
'name': name,
}) | report_import Report_Name, filename
Uploads a report template to the current user's reports
UN-DOCUMENTED CALL: This function is not considered stable. |
def group_add(self, name, restrict, repos, lces=[], assets=[], queries=[],
              policies=[], dashboards=[], credentials=[], description=''):
    '''group_add name, restrict, repos
    '''
    return self.raw_query('group', 'add', data={
        'lces': [{'id': i} for i in lces],
        'assets': [{'id': i} for i in assets],
        'queries': [{'id': i} for i in queries],
        'policies': [{'id': i} for i in policies],
        'dashboardTabs': [{'id': i} for i in dashboards],
        'credentials': [{'id': i} for i in credentials],
        'repositories': [{'id': i} for i in repos],
        'definingAssets': [{'id': i} for i in restrict],
        'name': name,
        'description': description,
        'users': [],
        'context': ''
    }) | group_add name, restrict, repos |
def get_geo_info(filename, band=1):
    ''' Gets information from a Raster data set
    '''
    sourceds = gdal.Open(filename, GA_ReadOnly)
    ndv = sourceds.GetRasterBand(band).GetNoDataValue()
    xsize = sourceds.RasterXSize
    ysize = sourceds.RasterYSize
    geot = sourceds.GetGeoTransform()
    projection = osr.SpatialReference()
    projection.ImportFromWkt(sourceds.GetProjectionRef())
    datatype = sourceds.GetRasterBand(band).DataType
    datatype = gdal.GetDataTypeName(datatype)
    return ndv, xsize, ysize, geot, projection, datatype | Gets information from a Raster data set |
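A usage sketch; 'elevation.tif' is an illustrative path. The geot tuple follows the GDAL geotransform convention (xmin, xres, 0, ymax, 0, -yres) for north-up rasters:

ndv, xsize, ysize, geot, projection, datatype = get_geo_info('elevation.tif')
print(xsize, ysize)       # raster dimensions in pixels
print(geot[0], geot[3])   # x of the left edge, y of the top edge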
def create_geotiff(name, Array, driver, ndv, xsize, ysize, geot, projection, datatype, band=1):
    '''
    Creates new geotiff from array
    '''
    if not isinstance(datatype, int):
        if not datatype.startswith('gdal.GDT_'):
            # Look up the GDAL type constant by name instead of using eval
            datatype = getattr(gdal, 'GDT_' + datatype)
    newfilename = name + '.tif'
    # Set nans to the original No Data Value
    Array[np.isnan(Array)] = ndv
    # Set up the dataset
    DataSet = driver.Create(newfilename, xsize, ysize, 1, datatype)
    # the '1' is for band 1.
    DataSet.SetGeoTransform(geot)
    DataSet.SetProjection(projection.ExportToWkt())
    # Write the array
    DataSet.GetRasterBand(band).WriteArray(Array)
    DataSet.GetRasterBand(band).SetNoDataValue(ndv)
    return newfilename | Creates new geotiff from array |
def load_tiff(file):
ndv, xsize, ysize, geot, projection, datatype = get_geo_info(file)
data = gdalnumeric.LoadFile(file)
data = np.ma.masked_array(data, mask=data == ndv, fill_value=ndv)
return data | Load a geotiff raster keeping ndv values using a masked array
Usage:
data = load_tiff(file) |
def from_file(filename, **kwargs):
ndv, xsize, ysize, geot, projection, datatype = get_geo_info(filename, **kwargs)
data = gdalnumeric.LoadFile(filename, **kwargs)
data = np.ma.masked_array(data, mask=data == ndv, fill_value=ndv)
return GeoRaster(data, geot, nodata_value=ndv, projection=projection, datatype=datatype) | Create a GeoRaster object from a file |
def to_pandas(raster, name='value'):
df = pd.DataFrame(raster.raster)
df = df.stack()
df = df.reset_index()
df.columns = ['row', 'col', name]
df['x'] = df.col.apply(lambda col: raster.geot[0]+(col)*raster.geot[1])
df['y'] = df.row.apply(lambda row: raster.geot[3]+(row)*raster.geot[-1])
return df | Convert GeoRaster to Pandas DataFrame, which can be easily exported to other types of files
The DataFrame has the row, col, value, x, and y values for each cell
Usage:
df = gr.to_pandas(raster) |
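The x/y columns come straight from the geotransform: x = geot[0] + col * geot[1] and y = geot[3] + row * geot[-1], where geot[-1] is the (negative) north-south pixel size. A minimal sketch, assuming a GeoRaster named raster:

df = to_pandas(raster)
# each row holds (row, col, value, x, y); x/y are derived from the
# geotransform as above, marking each cell's upper-left corner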
def to_geopandas(raster, **kwargs):
df = to_pandas(raster, **kwargs)
df['geometry'] = df.apply(squares, georaster=raster, axis=1)
df = gp.GeoDataFrame(df, crs=from_string(raster.projection.ExportToProj4()))
return df | Convert GeoRaster to GeoPandas DataFrame, which can be easily exported to other types of files
and used to do other types of operations.
The DataFrame has the geometry (Polygon), row, col, value, x, and y values for each cell
Usage:
df = gr.to_geopandas(raster) |
def raster_weights(raster, rook=False, transform='r', **kwargs):
rasterf = raster.flatten()
if len(raster.shape) == 1:
shape = (np.sqrt(raster.shape[0]) * np.array([1,1])).astype(int)
else:
shape = raster.shape
w = pysal.lat2W(*shape, rook=rook, **kwargs)
# Identify missing/no data
if isinstance(rasterf, np.ma.core.MaskedArray):
miss = rasterf.mask
else:
miss = np.logical_or(np.isnan(rasterf), np.isinf(rasterf))
missn = set(np.arange(0, len(miss))[miss])
cneighbors = {}
for key, value in w.neighbors.items():
if key not in missn:
value = list(set(value).difference(missn))
cneighbors[key] = value
w = pysal.W(cneighbors)
w.transform = transform
return w | Construct PySal weights for rasters
It drops weights for all cells that have no data or are Inf/NaN
Usage:
w = raster_weights(raster, rook=False, transform='r', **kwargs)
where
raster: (Masked) Numpy array for which weights are to be constructed
rook: Boolean, type of contiguity. Default is queen. For rook, rook = True
**kwargs are defined by and passed to pysal.lat2W.
See help(pysal.lat2W) |
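A sketch tying the weights to a PySal statistic, mirroring the pysal_* methods in the rows below (data is a masked 2-D array):

w = raster_weights(data, rook=False)
rasterf = data.flatten()
rasterf = rasterf[rasterf.mask == False]
mi = pysal.Moran(rasterf, w)
print(mi.I)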
def map_vector(x, raster, **kvars):
y = raster.copy()
y.raster[y.raster.mask == False] = x
return y | Create new GeoRaster, which has its data replaced by x
Useful to map output of PySal analyses, e.g. spatial autocorrelation values, etc.
Usage: raster2 = map_vector(x, raster)
where
raster: GeoRaster
x: Numpy array of data with same length as non-missing values in raster,
i.e., len(x) == np.sum(raster.mask==False) |
def copy(self):
return GeoRaster(self.raster.copy(), self.geot, nodata_value=self.nodata_value,
projection=self.projection, datatype=self.datatype) | Returns copy of itself |
def plot(self, figsize=None, ax=None, **kwargs):
    '''
    geo.plot()
    Returns plot of raster data
    '''
    if ax is None:
        fig, ax = plt.subplots(figsize=figsize)
    ax.set_aspect('equal')
    ax.matshow(self.raster, **kwargs)
    plt.draw()
    return ax | geo.plot()
Returns plot of raster data |
def gini(self):
if self.count()>1:
xsort = sorted(self.raster.data[self.raster.mask == False].flatten()) # increasing order
y = np.cumsum(xsort)
B = sum(y) / (y[-1] * len(xsort))
return 1 + 1./len(xsort) - 2*B
else:
return 1 | geo.gini()
Return computed Gini coefficient. |
def apply(self, func, *args, **kwargs):
    '''
    geo.apply(func, *args, **kwargs)
    Returns the value of applying function func on the raster data
    func: Python function
    *args: Arguments of function
    **kwargs: Additional arguments of function
    '''
    return func(self.raster, *args, **kwargs) | geo.apply(func, *args, **kwargs)
Returns the value of applying function func on the raster data
func: Python function
*args: Arguments of function
**kwargs: Additional arguments of function |
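A usage sketch; any callable that accepts the masked array works:

mean_value = geo.apply(np.ma.mean)
std_value = geo.apply(np.ma.std)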
def map_pixel(self, point_x, point_y):
    '''
    geo.map_pixel(point_x, point_y)
    Return value of raster in location
    Note: (point_x, point_y) must belong to the geographic coordinate system and
    the coverage of the raster
    '''
    row, col = map_pixel(point_x, point_y,
                         self.x_cell_size, self.y_cell_size, self.xmin, self.ymax)
    try:
        return self.raster[row, col]
    except IndexError:
        raise RasterGeoError('Make sure the point belongs to the raster coverage '
                             'and it is in the correct geographic coordinate system.') | geo.map_pixel(point_x, point_y)
Return value of raster in location
Note: (point_x, point_y) must belong to the geographic coordinate system and
the coverage of the raster |
def map_pixel_location(self, point_x, point_y):
    '''
    geo.map_pixel_location(point_x, point_y)
    Return the (row, col) location in the raster for a point
    '''
    row, col = map_pixel(point_x, point_y, self.x_cell_size, self.y_cell_size,
                         self.xmin, self.ymax)
    return np.array([row, col]) | geo.map_pixel_location(point_x, point_y)
Return the (row, col) location in the raster for a point |
def align(self, alignraster, how=np.mean, cxsize=None, cysize=None):
    '''
    geo.align(geo2, how=np.mean)
    Returns both georasters aligned and with the same pixel size
    '''
    return align_georasters(self, alignraster, how=how, cxsize=cxsize, cysize=cysize) | geo.align(geo2, how=np.mean)
Returns both georasters aligned and with the same pixel size |
def aggregate(self, block_size):
    '''
    geo.aggregate(block_size)
    Returns copy of raster aggregated to smaller resolution, by adding cells.
    '''
    raster2 = block_reduce(self.raster, block_size, func=np.ma.sum)
    geot = self.geot
    geot = (geot[0], block_size[0] * geot[1], geot[2], geot[3], geot[4],
            block_size[1] * geot[-1])
    return GeoRaster(raster2, geot, nodata_value=self.nodata_value,
                     projection=self.projection, datatype=self.datatype) | geo.aggregate(block_size)
Returns copy of raster aggregated to smaller resolution, by adding cells. |
def block_reduce(self, block_size, how=np.ma.mean):
    '''
    geo.block_reduce(block_size, how=func)
    Returns copy of raster aggregated to smaller resolution, by applying
    func to each block of cells. Default: func=np.ma.mean
    '''
    raster2 = block_reduce(self.raster, block_size, func=how)
    geot = self.geot
    geot = (geot[0], block_size[0] * geot[1], geot[2], geot[3], geot[4],
            block_size[1] * geot[-1])
    return GeoRaster(raster2, geot, nodata_value=self.nodata_value,
                     projection=self.projection, datatype=self.datatype) | geo.block_reduce(block_size, how=func)
Returns copy of raster aggregated to smaller resolution, by applying
func to each block of cells. Default: func=np.ma.mean |
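Sketch contrasting the two reductions on a 10x10 coarsening:

summed = geo.aggregate((10, 10))                       # adds cells in each block
averaged = geo.block_reduce((10, 10), how=np.ma.mean)  # averages them instead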
def raster_weights(self, **kwargs):
if self.weights is None:
        self.weights = raster_weights(self.raster, **kwargs) | Compute neighbor weights for GeoRaster.
See help(gr.raster_weights) for options
Usage:
geo.raster_weights(rook=True) |
def pysal_G(self, **kwargs):
if self.weights is None:
self.raster_weights(**kwargs)
rasterf = self.raster.flatten()
rasterf = rasterf[rasterf.mask==False]
self.G = pysal.G(rasterf, self.weights, **kwargs) | Compute Getis and Ord’s G for GeoRaster
Usage:
geo.pysal_G(permutations = 1000, rook=True)
arguments passed to raster_weights() and pysal.G
See help(gr.raster_weights), help(pysal.G) for options |
def pysal_Gamma(self, **kwargs):
if self.weights is None:
self.raster_weights(**kwargs)
rasterf = self.raster.flatten()
rasterf = rasterf[rasterf.mask==False]
self.Gamma = pysal.Gamma(rasterf, self.weights, **kwargs) | Compute Gamma Index of Spatial Autocorrelation for GeoRaster
Usage:
geo.pysal_Gamma(permutations = 1000, rook=True, operation='c')
arguments passed to raster_weights() and pysal.Gamma
See help(gr.raster_weights), help(pysal.Gamma) for options |
def pysal_Join_Counts(self, **kwargs):
if self.weights is None:
self.raster_weights(**kwargs)
rasterf = self.raster.flatten()
rasterf = rasterf[rasterf.mask==False]
self.Join_Counts = pysal.Join_Counts(rasterf, self.weights, **kwargs) | Compute join count statistics for GeoRaster
Usage:
geo.pysal_Join_Counts(permutations = 1000, rook=True)
arguments passed to raster_weights() and pysal.Join_Counts
See help(gr.raster_weights), help(pysal.Join_Counts) for options |
def pysal_Moran(self, **kwargs):
if self.weights is None:
self.raster_weights(**kwargs)
rasterf = self.raster.flatten()
rasterf = rasterf[rasterf.mask==False]
self.Moran = pysal.Moran(rasterf, self.weights, **kwargs) | Compute Moran's I measure of global spatial autocorrelation for GeoRaster
Usage:
geo.pysal_Moran(permutations = 1000, rook=True)
arguments passed to raster_weights() and pysal.Moran
See help(gr.raster_weights), help(pysal.Moran) for options |
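Usage sketch; Moran.I and Moran.p_sim are standard attributes of pysal.Moran when permutations are requested:

geo.pysal_Moran(permutations=1000, rook=True)
print(geo.Moran.I, geo.Moran.p_sim)   # statistic and pseudo p-value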
def pysal_Geary(self, **kwargs):
if self.weights is None:
self.raster_weights(**kwargs)
rasterf = self.raster.flatten()
rasterf = rasterf[rasterf.mask==False]
self.Geary = pysal.Geary(rasterf, self.weights, **kwargs) | Compute Geary’s C for GeoRaster
Usage:
geo.pysal_Geary(permutations = 1000, rook=True)
arguments passed to raster_weights() and pysal.Geary
See help(gr.raster_weights), help(pysal.Geary) for options |
def pysal_Moran_Local(self, **kwargs):
if self.weights is None:
self.raster_weights(**kwargs)
rasterf = self.raster.flatten()
rasterf = rasterf[rasterf.mask==False]
self.Moran_Local = pysal.Moran_Local(rasterf, self.weights, **kwargs)
for i in self.Moran_Local.__dict__.keys():
if (isinstance(getattr(self.Moran_Local, i), np.ma.masked_array) or
(isinstance(getattr(self.Moran_Local, i), np.ndarray)) and
len(getattr(self.Moran_Local, i).shape) == 1):
setattr(self.Moran_Local, i, self.map_vector(getattr(self.Moran_Local, i))) | Compute Local Moran's I measure of local spatial autocorrelation for GeoRaster
Usage:
geo.pysal_Moran_Local(permutations = 1000, rook=True)
arguments passed to raster_weights() and pysal.Moran_Local
See help(gr.raster_weights), help(pysal.Moran_Local) for options |
def mcp(self, *args, **kwargs):
# Create Cost surface to work on
self.mcp_cost = graph.MCP_Geometric(self.raster, *args, **kwargs) | Setup MCP_Geometric object from skimage for optimal travel time computations |
def notify(self, method, params=None):
log.debug('Sending notification: %s %s', method, params)
message = {
'jsonrpc': JSONRPC_VERSION,
'method': method,
}
if params is not None:
message['params'] = params
self._consumer(message) | Send a JSON RPC notification to the client.
Args:
method (str): The method name of the notification to send
params (any): The payload of the notification |
def request(self, method, params=None):
msg_id = self._id_generator()
log.debug('Sending request with id %s: %s %s', msg_id, method, params)
message = {
'jsonrpc': JSONRPC_VERSION,
'id': msg_id,
'method': method,
}
if params is not None:
message['params'] = params
request_future = futures.Future()
request_future.add_done_callback(self._cancel_callback(msg_id))
self._server_request_futures[msg_id] = request_future
self._consumer(message)
return request_future | Send a JSON RPC request to the client.
Args:
method (str): The method name of the message to send
params (any): The payload of the message
Returns:
Future that will resolve once a response has been received |
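The wire format produced by request() is plain JSON-RPC 2.0; an illustrative message (the id and method are examples):

message = {
    'jsonrpc': '2.0',
    'id': 1,
    'method': 'workspace/symbol',
    'params': {'query': 'foo'},
}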
def _cancel_callback(self, request_id):
def callback(future):
if future.cancelled():
self.notify(CANCEL_METHOD, {'id': request_id})
future.set_exception(JsonRpcRequestCancelled())
return callback | Construct a cancellation callback for the given request ID. |
def consume(self, message):
if 'jsonrpc' not in message or message['jsonrpc'] != JSONRPC_VERSION:
log.warn("Unknown message type %s", message)
return
if 'id' not in message:
log.debug("Handling notification from client %s", message)
self._handle_notification(message['method'], message.get('params'))
elif 'method' not in message:
log.debug("Handling response from client %s", message)
self._handle_response(message['id'], message.get('result'), message.get('error'))
else:
try:
log.debug("Handling request from client %s", message)
self._handle_request(message['id'], message['method'], message.get('params'))
except JsonRpcException as e:
log.exception("Failed to handle request %s", message['id'])
self._consumer({
'jsonrpc': JSONRPC_VERSION,
'id': message['id'],
'error': e.to_dict()
})
except Exception: # pylint: disable=broad-except
log.exception("Failed to handle request %s", message['id'])
self._consumer({
'jsonrpc': JSONRPC_VERSION,
'id': message['id'],
'error': JsonRpcInternalError.of(sys.exc_info()).to_dict()
}) | Consume a JSON RPC message from the client.
Args:
message (dict): The JSON RPC message sent by the client |
def _handle_notification(self, method, params):
if method == CANCEL_METHOD:
self._handle_cancel_notification(params['id'])
return
try:
handler = self._dispatcher[method]
except KeyError:
log.warn("Ignoring notification for unknown method %s", method)
return
try:
handler_result = handler(params)
except Exception: # pylint: disable=broad-except
log.exception("Failed to handle notification %s: %s", method, params)
return
if callable(handler_result):
log.debug("Executing async notification handler %s", handler_result)
notification_future = self._executor_service.submit(handler_result)
notification_future.add_done_callback(self._notification_callback(method, params)) | Handle a notification from the client. |
def _notification_callback(method, params):
def callback(future):
try:
future.result()
log.debug("Successfully handled async notification %s %s", method, params)
except Exception: # pylint: disable=broad-except
log.exception("Failed to handle async notification %s %s", method, params)
return callback | Construct a notification callback for the given request ID. |
def _handle_cancel_notification(self, msg_id):
request_future = self._client_request_futures.pop(msg_id, None)
if not request_future:
log.warn("Received cancel notification for unknown message id %s", msg_id)
return
# Will only work if the request hasn't started executing
if request_future.cancel():
log.debug("Cancelled request with id %s", msg_id) | Handle a cancel notification from the client. |
def _handle_request(self, msg_id, method, params):
try:
handler = self._dispatcher[method]
except KeyError:
raise JsonRpcMethodNotFound.of(method)
handler_result = handler(params)
if callable(handler_result):
log.debug("Executing async request handler %s", handler_result)
request_future = self._executor_service.submit(handler_result)
self._client_request_futures[msg_id] = request_future
request_future.add_done_callback(self._request_callback(msg_id))
else:
log.debug("Got result from synchronous request handler: %s", handler_result)
self._consumer({
'jsonrpc': JSONRPC_VERSION,
'id': msg_id,
'result': handler_result
}) | Handle a request from the client. |
def _request_callback(self, request_id):
def callback(future):
# Remove the future from the client requests map
self._client_request_futures.pop(request_id, None)
if future.cancelled():
future.set_exception(JsonRpcRequestCancelled())
message = {
'jsonrpc': JSONRPC_VERSION,
'id': request_id,
}
try:
message['result'] = future.result()
except JsonRpcException as e:
log.exception("Failed to handle request %s", request_id)
message['error'] = e.to_dict()
except Exception: # pylint: disable=broad-except
log.exception("Failed to handle request %s", request_id)
message['error'] = JsonRpcInternalError.of(sys.exc_info()).to_dict()
self._consumer(message)
return callback | Construct a request callback for the given request ID. |
def _handle_response(self, msg_id, result=None, error=None):
    request_future = self._server_request_futures.pop(msg_id, None)
    if not request_future:
        log.warn("Received response to unknown message id %s", msg_id)
        return
    if error is not None:
        log.debug("Received error response to message %s: %s", msg_id, error)
        request_future.set_exception(JsonRpcException.from_dict(error))
        return
    log.debug("Received result for message %s: %s", msg_id, result)
    request_future.set_result(result) | Handle a response from the client. |
def listen(self, message_consumer):
while not self._rfile.closed:
request_str = self._read_message()
if request_str is None:
break
try:
message_consumer(json.loads(request_str.decode('utf-8')))
except ValueError:
log.exception("Failed to parse JSON message %s", request_str)
continue | Blocking call to listen for messages on the rfile.
Args:
message_consumer (fn): function that is passed each message as it is read off the socket. |
def _read_message(self):
line = self._rfile.readline()
if not line:
return None
content_length = self._content_length(line)
# Blindly consume all header lines
while line and line.strip():
line = self._rfile.readline()
if not line:
return None
# Grab the body
return self._rfile.read(content_length) | Reads the contents of a message.
Returns:
body of message if parsable else None |
def _content_length(line):
if line.startswith(b'Content-Length: '):
_, value = line.split(b'Content-Length: ')
value = value.strip()
try:
return int(value)
except ValueError:
raise ValueError("Invalid Content-Length header: {}".format(value))
return None | Extract the content length from an input line. |
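On the wire, each message is framed with HTTP-style headers; _read_message() consumes header lines until the blank line and then reads exactly Content-Length bytes of body. An illustrative frame (the length matches the example body):

frame = (b'Content-Length: 36\r\n'
         b'\r\n'
         b'{"jsonrpc": "2.0", "method": "exit"}')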
def hostapi_info(index=None):
if index is None:
return (hostapi_info(i) for i in range(_pa.Pa_GetHostApiCount()))
else:
info = _pa.Pa_GetHostApiInfo(index)
if not info:
raise RuntimeError("Invalid host API")
assert info.structVersion == 1
return {'name': ffi.string(info.name).decode(errors='ignore'),
'default_input_device': info.defaultInputDevice,
'default_output_device': info.defaultOutputDevice} | Return a generator with information about each host API.
If index is given, only one dictionary for the given host API is
returned. |
def device_info(index=None):
if index is None:
return (device_info(i) for i in range(_pa.Pa_GetDeviceCount()))
else:
info = _pa.Pa_GetDeviceInfo(index)
if not info:
raise RuntimeError("Invalid device")
assert info.structVersion == 2
if 'DirectSound' in hostapi_info(info.hostApi)['name']:
enc = 'mbcs'
else:
enc = 'utf-8'
return {'name': ffi.string(info.name).decode(encoding=enc,
errors='ignore'),
'hostapi': info.hostApi,
'max_input_channels': info.maxInputChannels,
'max_output_channels': info.maxOutputChannels,
'default_low_input_latency': info.defaultLowInputLatency,
'default_low_output_latency': info.defaultLowOutputLatency,
'default_high_input_latency': info.defaultHighInputLatency,
'default_high_output_latency': info.defaultHighOutputLatency,
'default_samplerate': info.defaultSampleRate} | Return a generator with information about each device.
If index is given, only one dictionary for the given device is
returned. |
def _get_stream_parameters(kind, device, channels, dtype, latency, samplerate):
if device is None:
if kind == 'input':
device = _pa.Pa_GetDefaultInputDevice()
elif kind == 'output':
device = _pa.Pa_GetDefaultOutputDevice()
info = device_info(device)
if channels is None:
channels = info['max_' + kind + '_channels']
dtype = np.dtype(dtype)
try:
sample_format = _np2pa[dtype]
except KeyError:
raise ValueError("Invalid " + kind + " sample format")
if samplerate is None:
samplerate = info['default_samplerate']
parameters = ffi.new(
"PaStreamParameters*",
(device, channels, sample_format, latency, ffi.NULL))
return parameters, dtype, samplerate | Generate PaStreamParameters struct. |
def _frombuffer(ptr, frames, channels, dtype):
framesize = channels * dtype.itemsize
data = np.frombuffer(ffi.buffer(ptr, frames * framesize), dtype=dtype)
data.shape = -1, channels
return data | Create NumPy array from a pointer to some memory. |
def _split(value):
if isinstance(value, str):
# iterable, but not meant for splitting
return value, value
try:
invalue, outvalue = value
except TypeError:
invalue = outvalue = value
except ValueError:
raise ValueError("Only single values and pairs are allowed")
return invalue, outvalue | Split input/output value into two values. |
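Sketch of the three cases _split handles:

_split(2)           # -> (2, 2): scalars are duplicated
_split((1, 2))      # -> (1, 2): pairs are unpacked
_split('float32')   # -> ('float32', 'float32'): strings are not split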
def start(self):
err = _pa.Pa_StartStream(self._stream)
if err == _pa.paStreamIsNotStopped:
return
self._handle_error(err) | Commence audio processing.
If successful, the stream is considered active. |
def stop(self):
err = _pa.Pa_StopStream(self._stream)
if err == _pa.paStreamIsStopped:
return
self._handle_error(err) | Terminate audio processing.
This waits until all pending audio buffers have been played
before it returns. If successful, the stream is considered
inactive. |
def abort(self):
err = _pa.Pa_AbortStream(self._stream)
if err == _pa.paStreamIsStopped:
return
self._handle_error(err) | Terminate audio processing immediately.
This does not wait for pending audio buffers. If successful,
the stream is considered inactive. |
def read(self, frames, raw=False):
channels, _ = _split(self.channels)
dtype, _ = _split(self.dtype)
data = ffi.new("signed char[]", channels * dtype.itemsize * frames)
self._handle_error(_pa.Pa_ReadStream(self._stream, data, frames))
if not raw:
data = np.frombuffer(ffi.buffer(data), dtype=dtype)
data.shape = frames, channels
return data | Read samples from an input stream.
The function does not return until the required number of
frames has been read. This may involve waiting for the
operating system to supply the data.
If raw data is requested, the raw cffi data buffer is
returned. Otherwise, a numpy array of the appropriate dtype
with one column per channel is returned. |
def write(self, data):
frames = len(data)
_, channels = _split(self.channels)
_, dtype = _split(self.dtype)
if (not isinstance(data, np.ndarray) or data.dtype != dtype):
data = np.array(data, dtype=dtype)
if len(data.shape) == 1:
# play mono signals on all channels
data = np.tile(data, (channels, 1)).T
if data.shape[1] > channels:
data = data[:, :channels]
if data.shape < (frames, channels):
# if less data is available than requested, pad with zeros.
tmp = data
data = np.zeros((frames, channels), dtype=dtype)
data[:tmp.shape[0], :tmp.shape[1]] = tmp
data = data.ravel().tostring()
err = _pa.Pa_WriteStream(self._stream, data, frames)
self._handle_error(err) | Write samples to an output stream.
As much as one blocksize of audio data will be played
without blocking. If more than one blocksize was provided,
the function will only return when all but one blocksize
has been played.
Data will be converted to a numpy matrix. Multichannel data
should be provided as a (frames, channels) matrix. If the
data is provided as a 1-dim array, it will be treated as mono
data and will be played on all channels simultaneously. If the
data is provided as a 2-dim matrix and fewer tracks are
provided than channels, silence will be played on the missing
channels. Similarly, if more tracks are provided than there
are channels, the extraneous channels will not be played. |
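A playback sketch, assuming an open output stream s (stream setup is not shown in these rows):

import numpy as np
t = np.arange(44100) / 44100.0
tone = 0.2 * np.sin(2 * np.pi * 440 * t)   # 1-dim, so played on all channels
s.start()
s.write(tone)
s.stop()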
def _handle_shell(self,cfg_file,*args,**options):
args = ("--interactive",) + args
return supervisorctl.main(("-c",cfg_file) + args) | Command 'supervisord shell' runs the interactive command shell. |
def _handle_getconfig(self,cfg_file,*args,**options):
if args:
raise CommandError("supervisor getconfig takes no arguments")
print cfg_file.read()
return 0 | Command 'supervisor getconfig' prints merged config to stdout. |
def _get_autoreload_programs(self,cfg_file):
cfg = RawConfigParser()
cfg.readfp(cfg_file)
reload_progs = []
for section in cfg.sections():
if section.startswith("program:"):
try:
if cfg.getboolean(section,"autoreload"):
reload_progs.append(section.split(":",1)[1])
except NoOptionError:
pass
return reload_progs | Get the set of programs to auto-reload when code changes.
Such programs will have autoreload=true in their config section.
This can be affected by config file sections or command-line
arguments, so we need to read it out of the merged config. |
def _find_live_code_dirs(self):
live_dirs = []
for mod in sys.modules.values():
# Get the directory containing that module.
# This is deliberately casting a wide net.
try:
dirnm = os.path.dirname(mod.__file__)
except AttributeError:
continue
# Normalize it for comparison purposes.
dirnm = os.path.realpath(os.path.abspath(dirnm))
if not dirnm.endswith(os.sep):
dirnm += os.sep
            # Check that it's not an egg or some other weirdness
if not os.path.isdir(dirnm):
continue
# If it's a subdir of one we've already found, ignore it.
for dirnm2 in live_dirs:
if dirnm.startswith(dirnm2):
break
else:
# Remove any ones we've found that are subdirs of it.
live_dirs = [dirnm2 for dirnm2 in live_dirs\
if not dirnm2.startswith(dirnm)]
live_dirs.append(dirnm)
return live_dirs | Find all directories in which we might have live python code.
This walks all of the currently-imported modules and adds their
containing directory to the list of live dirs. After normalization
and de-duplication, we get a pretty good approximation of the
directories on sys.path that are actively in use. |
def render_config(data,ctx):
djsupervisor_tags.current_context = ctx
data = "{% load djsupervisor_tags %}" + data
t = template.Template(data)
c = template.Context(ctx)
return t.render(c).encode("ascii") | Render the given config data using Django's template system.
This function takes a config data string and a dict of context variables,
renders the data through Django's template system, and returns the result. |
def get_config_from_options(**options):
data = []
# Set whether or not to daemonize.
# Unlike supervisord, our default is to stay in the foreground.
data.append("[supervisord]\n")
if options.get("daemonize",False):
data.append("nodaemon=false\n")
else:
data.append("nodaemon=true\n")
if options.get("pidfile",None):
data.append("pidfile=%s\n" % (options["pidfile"],))
if options.get("logfile",None):
data.append("logfile=%s\n" % (options["logfile"],))
# Set which programs to launch automatically on startup.
for progname in options.get("launch",None) or []:
data.append("[program:%s]\nautostart=true\n" % (progname,))
for progname in options.get("nolaunch",None) or []:
data.append("[program:%s]\nautostart=false\n" % (progname,))
# Set which programs to include/exclude from the config
for progname in options.get("include",None) or []:
data.append("[program:%s]\nexclude=false\n" % (progname,))
for progname in options.get("exclude",None) or []:
data.append("[program:%s]\nexclude=true\n" % (progname,))
# Set which programs to autoreload when code changes.
# When this option is specified, the default for all other
# programs becomes autoreload=false.
if options.get("autoreload",None):
data.append("[program:autoreload]\nexclude=false\nautostart=true\n")
data.append("[program:__defaults__]\nautoreload=false\n")
for progname in options["autoreload"]:
data.append("[program:%s]\nautoreload=true\n" % (progname,))
# Set whether to use the autoreloader at all.
if options.get("noreload",False):
data.append("[program:autoreload]\nexclude=true\n")
return "".join(data) | Get config file fragment reflecting command-line options. |
def guess_project_dir():
projname = settings.SETTINGS_MODULE.split(".",1)[0]
projmod = import_module(projname)
projdir = os.path.dirname(projmod.__file__)
# For Django 1.3 and earlier, the manage.py file was located
# in the same directory as the settings file.
if os.path.isfile(os.path.join(projdir,"manage.py")):
return projdir
# For Django 1.4 and later, the manage.py file is located in
# the directory *containing* the settings file.
projdir = os.path.abspath(os.path.join(projdir, os.path.pardir))
if os.path.isfile(os.path.join(projdir,"manage.py")):
return projdir
msg = "Unable to determine the Django project directory;"\
" use --project-dir to specify it"
raise RuntimeError(msg) | Find the top-level Django project directory.
This function guesses the top-level Django project directory based on
the current environment. It looks for the module containing the currently-
active settings module, in both pre-1.4 and post-1.4 layouts. |
def set_if_missing(cfg,section,option,value):
try:
cfg.get(section,option)
except NoSectionError:
cfg.add_section(section)
cfg.set(section,option,value)
except NoOptionError:
cfg.set(section,option,value) | If the given option is missing, set to the given value. |
def rerender_options(options):
args = []
for name,value in options.iteritems():
name = name.replace("_","-")
if value is None:
pass
elif isinstance(value,bool):
if value:
args.append("--%s" % (name,))
elif isinstance(value,list):
for item in value:
args.append("--%s=%s" % (name,item))
else:
args.append("--%s=%s" % (name,value))
return " ".join(args) | Helper function to re-render command-line options.
This assumes that command-line options use the same name as their
key in the options dictionary. |
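Sketch of the round trip (this is a Python 2 codebase, hence iteritems; option order follows dict iteration, and None values are dropped):

rerender_options({'daemonize': True, 'launch': ['web', 'worker'], 'pidfile': None})
# -> "--daemonize --launch=web --launch=worker"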
def login(self, email=None, password=None, user=None):
if user is not None:
data = {'login': user, 'password': password}
elif email is not None:
data = {'email': email, 'password': password}
else:
raise ValueError('Neither username nor email provided to login')
self.headers = {'connection': 'close'}
response = self.post('/session', **data)
self.token = response['private_token']
self.headers = {'PRIVATE-TOKEN': self.token,
'connection': 'close'}
return response | Logs the user in and setups the header with the private token
:param email: Gitlab user Email
:param user: Gitlab username
:param password: Gitlab user password
:return: True if login successful
:raise: HttpError
:raise: ValueError |
def get_users(self, search=None, page=1, per_page=20, **kwargs):
if search:
return self.get('/users', page=page, per_page=per_page, search=search, **kwargs)
return self.get('/users', page=page, per_page=per_page, **kwargs) | Returns a list of users from the Gitlab server
:param search: Optional search query
:param page: Page number (default: 1)
:param per_page: Number of items to list per page (default: 20, max: 100)
:return: List of Dictionaries containing users
:raise: HttpError if invalid response returned |
def getusers(self, search=None, page=1, per_page=20, **kwargs):
return self.get_users(search=search, page=page, per_page=per_page, **kwargs) | Returns a list of users from the Gitlab server
.. warning:: Warning this is being deprecated please use :func:`gitlab.Gitlab.get_users`
:param search: Optional search query
:param page: Page number (default: 1)
:param per_page: Number of items to list per page (default: 20, max: 100)
:return: returns a dictionary of the users, false if there is an error |
def getuser(self, user_id):
request = requests.get(
'{0}/{1}'.format(self.users_url, user_id),
headers=self.headers, verify=self.verify_ssl, auth=self.auth, timeout=self.timeout)
if request.status_code == 200:
return request.json()
else:
return False | Get info for a user identified by id
:param user_id: id of the user
:return: False if not found, a dictionary if found |
def createuser(self, name, username, password, email, **kwargs):
data = {'name': name, 'username': username, 'password': password, 'email': email}
if kwargs:
data.update(kwargs)
request = requests.post(
self.users_url, headers=self.headers, data=data,
verify=self.verify_ssl, auth=self.auth, timeout=self.timeout)
if request.status_code == 201:
return request.json()
elif request.status_code == 404:
return False | Create a user
:param name: Obligatory
:param username: Obligatory
:param password: Obligatory
:param email: Obligatory
:param kwargs: Any param that the Gitlab API supports
:return: True if the user was created, False if it wasn't (already exists) |
def deleteuser(self, user_id):
deleted = self.delete_user(user_id)
if deleted is False:
return False
else:
return True | Deletes a user. Available only for administrators.
This is an idempotent function, calling this function for a non-existent user id
still returns a status code 200 OK.
The JSON response differs if the user was actually deleted or not.
In the former the user is returned and in the latter not.
.. warning:: Warning this is being deprecated please use :func:`gitlab.Gitlab.delete_user`
:param user_id: The ID of the user
:return: True if it deleted, False if it couldn't |
def currentuser(self):
request = requests.get(
'{0}/api/v3/user'.format(self.host),
headers=self.headers, verify=self.verify_ssl, auth=self.auth, timeout=self.timeout)
return request.json() | Returns the current user parameters. The current user is linked to the secret token
:return: a list with the current user properties |
def edituser(self, user_id, **kwargs):
data = {}
if kwargs:
data.update(kwargs)
request = requests.put(
'{0}/{1}'.format(self.users_url, user_id),
headers=self.headers, data=data, timeout=self.timeout, verify=self.verify_ssl, auth=self.auth)
if request.status_code == 200:
return request.json()
else:
return False | Edits an user data.
:param user_id: id of the user to change
:param kwargs: Any param that the Gitlab API supports
:return: Dict of the user |
def getsshkeys(self):
request = requests.get(
self.keys_url, headers=self.headers, verify=self.verify_ssl, auth=self.auth, timeout=self.timeout)
if request.status_code == 200:
return request.json()
else:
return False | Gets all the ssh keys for the current user
:return: a dictionary with the lists |
def addsshkey(self, title, key):
data = {'title': title, 'key': key}
request = requests.post(
self.keys_url, headers=self.headers, data=data,
verify=self.verify_ssl, auth=self.auth, timeout=self.timeout)
if request.status_code == 201:
return True
else:
return False | Add a new ssh key for the current user
:param title: title of the new key
:param key: the key itself
:return: true if added, false if it didn't add it (it could be because the name or key already exists) |
def addsshkeyuser(self, user_id, title, key):
data = {'title': title, 'key': key}
request = requests.post(
'{0}/{1}/keys'.format(self.users_url, user_id), headers=self.headers,
data=data, verify=self.verify_ssl, auth=self.auth, timeout=self.timeout)
if request.status_code == 201:
return True
else:
return False | Add a new ssh key for the user identified by id
:param user_id: id of the user to add the key to
:param title: title of the new key
:param key: the key itself
:return: true if added, false if it didn't add it (it could be because the name or key already exists) |
def deletesshkey(self, key_id):
request = requests.delete(
'{0}/{1}'.format(self.keys_url, key_id), headers=self.headers,
verify=self.verify_ssl, auth=self.auth, timeout=self.timeout)
if request.content == b'null':
return False
else:
return True | Deletes an sshkey for the current user identified by id
:param key_id: the id of the key
:return: False if it didn't delete it, True if it was deleted |
def get(self, uri, default_response=None, **kwargs):
url = self.api_url + uri
response = requests.get(url, params=kwargs, headers=self.headers,
verify=self.verify_ssl, auth=self.auth,
timeout=self.timeout)
return self.success_or_raise(response, default_response=default_response) | Call GET on the Gitlab server
>>> gitlab = Gitlab(host='http://localhost:10080', verify_ssl=False)
>>> gitlab.login(user='root', password='5iveL!fe')
>>> gitlab.get('/users/5')
:param uri: String with the URI for the endpoint to GET from
:param default_response: Return value if JSONDecodeError
:param kwargs: Key word arguments to use as GET arguments
:return: Dictionary containing response data
:raise: HttpError: If invalid response returned |
def post(self, uri, default_response=None, **kwargs):
url = self.api_url + uri
response = requests.post(
url, headers=self.headers, data=kwargs,
verify=self.verify_ssl, auth=self.auth, timeout=self.timeout)
return self.success_or_raise(response, default_response=default_response) | Call POST on the Gitlab server
>>> gitlab = Gitlab(host='http://localhost:10080', verify_ssl=False)
>>> gitlab.login(user='root', password='5iveL!fe')
>>> password = 'MyTestPassword1'
>>> email = '[email protected]'
>>> data = {'name': 'test', 'username': 'test1', 'password': password, 'email': email}
>>> gitlab.post('/users/5', **data)
:param uri: String with the URI for the endpoint to POST to
:param default_response: Return value if JSONDecodeError
:param kwargs: Key word arguments representing the data to use in the POST
:return: Dictionary containing response data
:raise: HttpError: If invalid response returned |
def delete(self, uri, default_response=None):
url = self.api_url + uri
response = requests.delete(
url, headers=self.headers, verify=self.verify_ssl,
auth=self.auth, timeout=self.timeout)
return self.success_or_raise(response, default_response=default_response) | Call DELETE on the Gitlab server
>>> gitlab = Gitlab(host='http://localhost:10080', verify_ssl=False)
>>> gitlab.login(user='root', password='5iveL!fe')
>>> gitlab.delete('/users/5')
:param uri: String with the URI you wish to delete
:param default_response: Return value if JSONDecodeError
:return: Dictionary containing response data
:raise: HttpError: If invalid response returned |
def success_or_raise(self, response, default_response=None):
if self.suppress_http_error and not response.ok:
return False
response_json = default_response
if response_json is None:
response_json = {}
response.raise_for_status()
try:
response_json = response.json()
except ValueError:
pass
return response_json | Check if request was successful or raises an HttpError
:param response: Response Object to check
:param default_response: Return value if JSONDecodeError
:returns dict: Dictionary containing response data
:returns bool: :obj:`False` on failure when exceptions are suppressed
:raises requests.exceptions.HTTPError: If invalid response returned |
def getall(fn, page=None, *args, **kwargs):
if not page:
page = 1
while True:
results = fn(*args, page=page, **kwargs)
if not results:
break
for x in results:
yield x
page += 1 | Auto-iterate over the paginated results of various methods of the API.
Pass the GitLabAPI method as the first argument, followed by the
other parameters as normal. Include `page` to determine first page to poll.
Remaining kwargs are passed on to the called method, including `per_page`.
:param fn: Actual method to call
:param page: Optional, page number to start at, defaults to 1
:param args: Positional arguments to actual method
:param kwargs: Keyword arguments to actual method
:return: Yields each item in the result until exhausted, and then implicit StopIteration; or no elements if error |
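Usage sketch, reusing the doctest-style setup from the rows above:

gitlab = Gitlab(host='http://localhost:10080', verify_ssl=False)
gitlab.login(user='root', password='5iveL!fe')
for user in getall(gitlab.get_users, per_page=100):
    print(user['username'])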
def setsudo(self, user=None):
if user is None:
try:
self.headers.pop('SUDO')
except KeyError:
pass
else:
self.headers['SUDO'] = user | Set the subsequent API calls to the user provided
:param user: User id or username to change to, None to return to the logged user
:return: Nothing |
def get_project(self, project):
project = format_string(project)
return self.get(
'/projects/{project}'.format(project=project)) | Get info for a project identified by id or namespace/project_name
:param project: The ID or URL-encoded path of the project
:return: Dictionary containing the Project
:raise: HttpError: If invalid response returned |
def createproject(self, name, **kwargs):
data = {'name': name}
if kwargs:
data.update(kwargs)
request = requests.post(
self.projects_url, headers=self.headers, data=data,
verify=self.verify_ssl, auth=self.auth, timeout=self.timeout)
if request.status_code == 201:
return request.json()
elif request.status_code == 403:
if 'Your own projects limit is 0' in request.text:
print(request.text)
return False
else:
return False | Creates a new project owned by the authenticated user.
:param name: new project name
:param path: custom repository name for new project. By default generated based on name
:param namespace_id: namespace for the new project (defaults to user)
:param description: short project description
:param issues_enabled:
:param merge_requests_enabled:
:param wiki_enabled:
:param snippets_enabled:
:param public: if true same as setting visibility_level = 20
:param visibility_level:
:param sudo:
:param import_url:
:return: |
def editproject(self, project_id, **kwargs):
data = {"id": project_id}
if kwargs:
data.update(kwargs)
request = requests.put(
'{0}/{1}'.format(self.projects_url, project_id), headers=self.headers,
data=data, verify=self.verify_ssl, auth=self.auth, timeout=self.timeout)
if request.status_code == 200:
return True
elif request.status_code == 400:
if "Your param's are invalid" in request.text:
print(request.text)
return False
else:
return False | Edit an existing project.
:param name: new project name
:param path: custom repository name for new project. By default generated based on name
:param default_branch: they default branch
:param description: short project description
:param issues_enabled:
:param merge_requests_enabled:
:param wiki_enabled:
:param snippets_enabled:
:param public: if true same as setting visibility_level = 20
:param visibility_level:
:return: |
def shareproject(self, project_id, group_id, group_access):
data = {'id': project_id, 'group_id': group_id, 'group_access': group_access}
request = requests.post(
'{0}/{1}/share'.format(self.projects_url, project_id),
headers=self.headers, data=data, verify=self.verify_ssl)
    return request.status_code == 201 | Allows sharing a project with a group.
:param project_id: The ID of a project
:param group_id: The ID of a group
:param group_access: Level of permissions for sharing
:return: True if success |
def delete_project(self, id):
url = '/projects/{id}'.format(id=id)
response = self.delete(url)
if response is True:
return {}
else:
return response | Delete a project from the Gitlab server
Gitlab currently returns a Boolean True if the deleted and as such we return an
empty Dictionary
:param id: The ID of the project or NAMESPACE/PROJECT_NAME
:return: Dictionary
:raise: HttpError: If invalid response returned |
def createprojectuser(self, user_id, name, **kwargs):
data = {'name': name}
if kwargs:
data.update(kwargs)
request = requests.post(
'{0}/user/{1}'.format(self.projects_url, user_id), headers=self.headers,
data=data, verify=self.verify_ssl, auth=self.auth, timeout=self.timeout)
if request.status_code == 201:
return True
else:
return False | Creates a new project owned by the specified user. Available only for admins.
:param user_id: user_id of owner
:param name: new project name
:param description: short project description
:param default_branch: 'master' by default
:param issues_enabled:
:param merge_requests_enabled:
:param wiki_enabled:
:param snippets_enabled:
:param public: if true same as setting visibility_level = 20
:param visibility_level:
:param import_url:
:param sudo:
:return: |
def addprojectmember(self, project_id, user_id, access_level):
    # Map named access levels to their numeric values; ints pass through.
    if isinstance(access_level, str):
        if access_level.lower() == 'master':
            access_level = 40
        elif access_level.lower() == 'developer':
            access_level = 30
        elif access_level.lower() == 'reporter':
            access_level = 20
        else:
            access_level = 10
    data = {'id': project_id, 'user_id': user_id, 'access_level': access_level}
request = requests.post(
'{0}/{1}/members'.format(self.projects_url, project_id),
headers=self.headers, data=data, verify=self.verify_ssl, auth=self.auth, timeout=self.timeout)
if request.status_code == 201:
return True
else:
return False | Adds a project member to a project
:param project_id: project id
:param user_id: user id
:param access_level: access level, see gitlab help to know more
:return: True if success |
def deleteprojectmember(self, project_id, user_id):
request = requests.delete(
'{0}/{1}/members/{2}'.format(self.projects_url, project_id, user_id),
headers=self.headers, verify=self.verify_ssl, auth=self.auth, timeout=self.timeout)
if request.status_code == 200:
return True | Delete a project member
:param project_id: project id
:param user_id: user id
:return: always true |
def addprojecthook(self, project_id, url, push=False, issues=False, merge_requests=False, tag_push=False):
data = {
'id': project_id,
'url': url,
'push_events': int(bool(push)),
'issues_events': int(bool(issues)),
'merge_requests_events': int(bool(merge_requests)),
'tag_push_events': int(bool(tag_push)),
}
request = requests.post(
'{0}/{1}/hooks'.format(self.projects_url, project_id),
headers=self.headers, data=data, verify=self.verify_ssl, auth=self.auth, timeout=self.timeout)
if request.status_code == 201:
return request.json()
else:
        return False | Add a hook to a project
:param project_id: project id
:param url: url of the hook
:return: True if success |
def editprojecthook(self, project_id, hook_id, url, push=False, issues=False, merge_requests=False, tag_push=False):
data = {
"id": project_id,
"hook_id": hook_id,
"url": url,
'push_events': int(bool(push)),
'issues_events': int(bool(issues)),
'merge_requests_events': int(bool(merge_requests)),
'tag_push_events': int(bool(tag_push)),
}
request = requests.put(
'{0}/{1}/hooks/{2}'.format(self.projects_url, project_id, hook_id),
headers=self.headers, data=data, verify=self.verify_ssl, auth=self.auth, timeout=self.timeout)
if request.status_code == 200:
return True
else:
return False | edit an existing hook from a project
:param project_id: project id
:param hook_id: hook id
:param url: the new url
:return: True if success |